@@ -96,14 +96,25 @@ def pretrain(self, lr=0.1, k=1, epochs=100):
            rbm = self.rbm_layers[i]

            for epoch in xrange(epochs):
-               c = []
                rbm.contrastive_divergence(lr=lr, k=k, input=layer_input)
                # cost = rbm.get_reconstruction_cross_entropy()
-               # # c.append(cost)
                # print >> sys.stderr, \
                #        'Pre-training layer %d, epoch %d, cost ' %(i, epoch), cost
-
-       # print numpy.mean(c)
+
+   # def pretrain(self, lr=0.1, k=1, epochs=100):
+   #     # pre-train layer-wise
+   #     for i in xrange(self.n_layers):
+   #         rbm = self.rbm_layers[i]
+
+   #         for epoch in xrange(epochs):
+   #             layer_input = self.x
+   #             for j in xrange(i):
+   #                 layer_input = self.sigmoid_layers[j].sample_h_given_v(layer_input)
+
+   #             rbm.contrastive_divergence(lr=lr, k=k, input=layer_input)
+   #             # cost = rbm.get_reconstruction_cross_entropy()
+   #             # # print >> sys.stderr, \
+   #             # #     'Pre-training layer %d, epoch %d, cost ' %(i, epoch), cost


    def finetune(self, lr=0.1, epochs=100):
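Note: the commented-out block added above preserves the previous pretrain, which rebuilt layer_input from self.x inside every epoch. Below is a minimal sketch (not the committed code) of how the revised greedy layer-wise loop presumably reads after this change; the if/else that derives layer_input before the epoch loop is an assumption, since those lines fall outside the hunk, while the attribute and method names (self.rbm_layers, self.sigmoid_layers, sample_h_given_v, contrastive_divergence) are taken from the diff and the surrounding DBN class is assumed.

    # Sketch only: greedy layer-wise CD-k pre-training where each layer's
    # input is sampled once from the layers below, then reused for all epochs.
    def pretrain(self, lr=0.1, k=1, epochs=100):
        for i in xrange(self.n_layers):
            # assumed: propagate the data through the already-trained layers once
            if i == 0:
                layer_input = self.x
            else:
                layer_input = self.sigmoid_layers[i-1].sample_h_given_v(layer_input)

            rbm = self.rbm_layers[i]
            for epoch in xrange(epochs):
                # one CD-k parameter update per epoch on the fixed layer input
                rbm.contrastive_divergence(lr=lr, k=k, input=layer_input)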
@@ -153,7 +164,7 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
    rng = numpy.random.RandomState(123)

    # construct DBN
-   dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[10], n_outs=2, numpy_rng=rng)
+   dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[4, 3], n_outs=2, numpy_rng=rng)

    # pre-training (TrainUnsupervisedDBN)
    dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)
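Note: hidden_layer_sizes=[4, 3] stacks two RBMs (6 -> 4 and 4 -> 3) under the 2-unit output layer instead of a single 6 -> 10 RBM. A hedged usage sketch of the new configuration follows; the toy input/label arrays are illustrative only, while the DBN constructor, pretrain and finetune signatures come from the diff.

    import numpy

    # illustrative 6-dimensional inputs and one-hot labels (not from the commit)
    x = numpy.array([[1, 1, 1, 0, 0, 0],
                     [1, 0, 1, 0, 0, 0],
                     [0, 0, 0, 1, 1, 0],
                     [0, 0, 0, 1, 1, 1]])
    y = numpy.array([[1, 0],
                     [1, 0],
                     [0, 1],
                     [0, 1]])
    rng = numpy.random.RandomState(123)

    # two stacked RBMs (6 -> 4 -> 3) with a 2-unit output layer, as in the new line above
    dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[4, 3], n_outs=2, numpy_rng=rng)
    dbn.pretrain(lr=0.1, k=1, epochs=1000)   # unsupervised CD-k pre-training
    dbn.finetune(lr=0.1, epochs=100)         # supervised fine-tuning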