Fixed stable softmax

This commit is contained in:
AntreasAntoniou 2017-11-14 14:56:11 +00:00
parent b9147c269c
commit 5769d7c1f0

View File

@ -155,7 +155,7 @@ class CrossEntropySoftmaxError(object):
         Scalar error function value.
         """
         normOutputs = outputs - outputs.max(-1)[:, None]
-        logProb = normOutputs - np.log(np.sum(np.exp(normOutputs))(-1)[:, None])
+        logProb = normOutputs - np.log(np.sum(np.exp(normOutputs)))
         return -np.mean(np.sum(targets * logProb, axis=1))

     def grad(self, outputs, targets):