
Commit f35193b

Author: Yusuke Sugomori
softmax bug fix
1 parent fe808ed

File tree

4 files changed (+15, -6 lines)

DeepBeliefNets.py
Lines changed: 5 additions & 2 deletions

@@ -164,7 +164,7 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \
     rng = numpy.random.RandomState(123)

     # construct DBN
-    dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[4, 3], n_outs=2, numpy_rng=rng)
+    dbn = DBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[3, 3], n_outs=2, numpy_rng=rng)

     # pre-training (TrainUnsupervisedDBN)
     dbn.pretrain(lr=pretrain_lr, k=1, epochs=pretraining_epochs)

@@ -174,7 +174,10 @@ def test_dbn(pretrain_lr=0.1, pretraining_epochs=1000, k=1, \


     # test
-    x = numpy.array([1, 1, 0, 0, 0, 0])
+    x = numpy.array([[1, 1, 0, 0, 0, 0],
+                     [0, 0, 0, 1, 1, 0],
+                     [1, 1, 1, 1, 1, 0]])
+
     print dbn.predict(x)


LogisticRegression.py
Lines changed: 4 additions & 1 deletion

@@ -87,7 +87,10 @@ def test_lr(learning_rate=0.01, n_epochs=200):


     # test
-    x = numpy.array([1, 1, 0, 0, 0, 0])
+    x = numpy.array([[1, 1, 0, 0, 0, 0],
+                     [0, 0, 0, 1, 1, 0],
+                     [1, 1, 1, 1, 1, 0]])
+
     print >> sys.stderr, classifier.predict(x)


RestrictedBoltzmannMachine.py
Lines changed: 2 additions & 2 deletions

@@ -162,8 +162,8 @@ def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):


     # test
-    v = numpy.array([[0, 0, 0, 1, 1, 0],
-                     [1, 1, 0, 0, 0, 0]])
+    v = numpy.array([[1, 1, 0, 0, 0, 0],
+                     [0, 0, 0, 1, 1, 0]])

     print rbm.reconstruct(v)

utils.py
Lines changed: 4 additions & 1 deletion

@@ -17,4 +17,7 @@ def sigmoid_err_handler(type, flg):

 def softmax(x):
     e = numpy.exp(x - numpy.max(x))  # prevent overflow
-    return e / numpy.sum(e, axis=0)
+    if e.ndim == 1:
+        return e / numpy.sum(e, axis=0)
+    else:
+        return e / numpy.array([numpy.sum(e, axis=1)]).T  # ndim = 2
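Why this fixes the bug: with the old single-line return, a 2-D input (a batch of samples, one per row) was divided by numpy.sum(e, axis=0), i.e. by column sums taken across samples, so the individual rows no longer summed to 1. The patched version keeps the old behaviour for 1-D input and, for 2-D input, divides each row by its own sum, which is presumably why the test inputs in DeepBeliefNets.py and LogisticRegression.py above can now be 2-D batches. A standalone sketch of the patched function follows; the sample batch is illustrative only, not part of the commit:

import numpy

def softmax(x):
    e = numpy.exp(x - numpy.max(x))  # prevent overflow
    if e.ndim == 1:
        return e / numpy.sum(e, axis=0)                   # single sample: normalize the vector
    else:
        return e / numpy.array([numpy.sum(e, axis=1)]).T  # batch: divide each row by its own sum

x = numpy.array([[1.0, 2.0, 3.0],
                 [1.0, 1.0, 1.0]])
print(softmax(x).sum(axis=1))  # each row now sums to 1
# The old `e / numpy.sum(e, axis=0)` would instead have mixed the two samples together.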
