Use numerically stabilized softmax (subtract the row-wise max before exponentiating) instead of the unstable version
This commit is contained in:
parent
e7fc62cd09
commit
6a0cdbea3a
@ -154,7 +154,7 @@ class CrossEntropySoftmaxError(object):
|
|||||||
Returns:
|
Returns:
|
||||||
Scalar error function value.
|
Scalar error function value.
|
||||||
"""
|
"""
|
||||||
probs = np.exp(outputs)
|
probs = np.exp(outputs - outputs.max(-1)[:, None])
|
||||||
probs /= probs.sum(-1)[:, None]
|
probs /= probs.sum(-1)[:, None]
|
||||||
return -np.mean(np.sum(targets * np.log(probs), axis=1))
|
return -np.mean(np.sum(targets * np.log(probs), axis=1))
|
||||||
|
|
||||||
@ -168,7 +168,7 @@ class CrossEntropySoftmaxError(object):
|
|||||||
Returns:
|
Returns:
|
||||||
Gradient of error function with respect to outputs.
|
Gradient of error function with respect to outputs.
|
||||||
"""
|
"""
|
||||||
probs = np.exp(outputs)
|
probs = np.exp(outputs - outputs.max(-1)[:, None])
|
||||||
probs /= probs.sum(-1)[:, None]
|
probs /= probs.sum(-1)[:, None]
|
||||||
return (probs - targets) / outputs.shape[0]
|
return (probs - targets) / outputs.shape[0]
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user