Skip to content

Commit 2eed72e

Browse files
author
Yusuke Sugomori
committed
pretrain DBN
1 parent 5b420c4 commit 2eed72e

File tree

1 file changed

+16
-5
lines changed

1 file changed

+16
-5
lines changed

DeepBeliefNets.py

Lines changed: 16 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -96,14 +96,25 @@ def pretrain(self, lr=0.1, k=1, epochs=100):
9696
rbm = self.rbm_layers[i]
9797

9898
for epoch in xrange(epochs):
99-
c = []
10099
rbm.contrastive_divergence(lr=lr, k=k, input=layer_input)
101100
# cost = rbm.get_reconstruction_cross_entropy()
102-
# # c.append(cost)
103101
# print >> sys.stderr, \
104102
# 'Pre-training layer %d, epoch %d, cost ' %(i, epoch), cost
105-
106-
# print numpy.mean(c)
103+
104+
# def pretrain(self, lr=0.1, k=1, epochs=100):
105+
# # pre-train layer-wise
106+
# for i in xrange(self.n_layers):
107+
# rbm = self.rbm_layers[i]
108+
109+
# for epoch in xrange(epochs):
110+
# layer_input = self.x
111+
# for j in xrange(i):
112+
# layer_input = self.sigmoid_layers[j].sample_h_given_v(layer_input)
113+
114+
# rbm.contrastive_divergence(lr=lr, k=k, input=layer_input)
115+
# # cost = rbm.get_reconstruction_cross_entropy()
116+
# # print >> sys.stderr, \
117+
# # 'Pre-training layer %d, epoch %d, cost ' %(i, epoch), cost
107118

108119

109120
def finetune(self, lr=0.1, epochs=100):
@@ -153,7 +164,7 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
153164
rng = numpy.random.RandomState(123)
154165

155166
# construct DBN
156-
dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[10], n_outs=2, numpy_rng=rng)
167+
dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[4, 3], n_outs=2, numpy_rng=rng)
157168

158169
# pre-training (TrainUnsupervisedDBN)
159170
dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)

0 commit comments

Comments (0)