Expand softmax for usage in FCNs (#5873)

* Expand softmax for usage in FCNs

* Update activations.py
Martin Thoma 2017-03-21 14:42:34 +01:00 committed by François Chollet
parent 7dc09a34f6
commit 31d821d878

@@ -4,18 +4,28 @@ from . import backend as K
 from .utils.generic_utils import deserialize_keras_object
 
 
-def softmax(x):
+def softmax(x, axis=-1):
     """Softmax activation function.
+
+    # Arguments
+        x : Tensor.
+        axis: Integer, axis along which the softmax normalization is applied.
+
+    # Returns
+        Tensor, output of softmax transformation.
+
+    # Raises
+        ValueError: In case `dim(x) == 1`.
     """
     ndim = K.ndim(x)
     if ndim == 2:
         return K.softmax(x)
-    elif ndim == 3:
-        e = K.exp(x - K.max(x, axis=-1, keepdims=True))
-        s = K.sum(e, axis=-1, keepdims=True)
+    elif ndim > 2:
+        e = K.exp(x - K.max(x, axis=axis, keepdims=True))
+        s = K.sum(e, axis=axis, keepdims=True)
         return e / s
     else:
-        raise ValueError('Cannot apply softmax to a tensor '
-                         'that is not 2D or 3D. '
-                         'Here, ndim=' + str(ndim))
+        raise ValueError('Cannot apply softmax to a tensor that is 1D')
 
 
 def elu(x, alpha=1.0):
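
As a sanity check on the new behavior, here is a minimal NumPy sketch (not part of the commit) that mirrors the `ndim > 2` branch; `softmax_nd` and the FCN-style output shape are hypothetical stand-ins for the Keras backend ops:

import numpy as np

def softmax_nd(x, axis=-1):
    # Mirrors the ndim > 2 branch: subtract the max along `axis` for
    # numerical stability, exponentiate, then normalize along `axis`.
    e = np.exp(x - np.max(x, axis=axis, keepdims=True))
    return e / np.sum(e, axis=axis, keepdims=True)

# Hypothetical fully convolutional network output: (batch, height, width, classes).
scores = np.random.randn(2, 4, 4, 3)
probs = softmax_nd(scores, axis=-1)
assert np.allclose(probs.sum(axis=-1), 1.0)  # each pixel gets a distribution over classes

This is the same computation the removed `ndim == 3` branch performed for sequence outputs, generalized to tensors of any rank and an arbitrary normalization axis.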