Changed ELU implementation to use native ops (#3845)
parent cfc9b4d41d
commit f0d9867d09
keras/activations.py
@@ -15,6 +15,9 @@ def softmax(x):
                         'Here, ndim=' + str(ndim))
 
 
+def elu(x, alpha=1.0):
+    return K.elu(x, alpha)
+
 
 def softplus(x):
     return K.softplus(x)
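With this wrapper in place, the activation can be resolved by name like the other entries in keras.activations. A minimal usage sketch, assuming the Keras 1.x Sequential API (the layer sizes and optimizer here are arbitrary illustrations, not part of the commit):

from keras.models import Sequential
from keras.layers import Dense

# Tiny model that picks up the new ELU activation by name.
model = Sequential()
model.add(Dense(64, input_dim=20, activation='elu'))   # resolved via keras.activations.elu
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='sgd', loss='binary_crossentropy')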
keras/backend/tensorflow_backend.py
@@ -1348,6 +1348,19 @@ def relu(x, alpha=0., max_value=None):
     return x
 
 
+def elu(x, alpha=1.):
+    """ Exponential linear unit
+
+    # Arguments
+        x: Tensor to compute the activation function for.
+        alpha: scalar
+    """
+    res = tf.nn.elu(x)
+    if alpha == 1:
+        return res
+    else:
+        return tf.select(x > 0, res, alpha * res)
+
+
 def softmax(x):
     '''Softmax of a tensor.
     '''
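For reference, ELU computes x for x > 0 and alpha * (exp(x) - 1) for x <= 0. Since tf.nn.elu already returns exp(x) - 1 on the negative branch, scaling that branch by alpha is all the alpha != 1 case needs. A NumPy-only sketch of the same math (illustrative, not the backend code):

import numpy as np

def elu_reference(x, alpha=1.0):
    # x on the positive side, alpha * (exp(x) - 1) on the negative side
    return np.where(x > 0, x, alpha * np.expm1(x))

x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])
print(elu_reference(x, alpha=0.5))

One caveat on the TensorFlow call: tf.select is the pre-1.0 API; in later TensorFlow releases the same element-wise selection is done with tf.where.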
keras/backend/theano_backend.py
@@ -931,11 +931,26 @@ def in_test_phase(x, alt):
 
 
 # NN OPERATIONS
 
+def _assert_has_capability(module, func):
+    assert hasattr(module, func), ('It looks like like your version of '
+                                   'Theano is out of date. '
+                                   'Install the latest version with:\n'
+                                   'pip install git+git://github.com/Theano/Theano.git --upgrade --no-deps')
+
+
+def elu(x, alpha=1.0):
+    """ Exponential linear unit
+
+    # Arguments
+        x: Tensor to compute the activation function for.
+        alpha: scalar
+    """
+    _assert_has_capability(T.nnet, 'elu')
+    return T.nnet.elu(x, alpha)
+
 
 def relu(x, alpha=0., max_value=None):
-    assert hasattr(T.nnet, 'relu'), ('It looks like like your version of '
-                                     'Theano is out of date. '
-                                     'Install the latest version with:\n'
-                                     'pip install git+git://github.com/Theano/Theano.git --upgrade --no-deps')
+    _assert_has_capability(T.nnet, 'relu')
     x = T.nnet.relu(x, alpha)
     if max_value is not None:
         x = T.minimum(x, max_value)
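The version guard that relu used to carry inline is factored into _assert_has_capability so elu can reuse it. A small stand-alone sketch of the pattern, using a stand-in namespace instead of T.nnet and a simplified message, purely for illustration:

import types

def _assert_has_capability(module, func):
    # Fail fast with an actionable message when the backend lacks an op.
    assert hasattr(module, func), (
        func + ' is not available; your Theano version is likely out of date.')

fake_nnet = types.SimpleNamespace(relu=lambda x: x)
_assert_has_capability(fake_nnet, 'relu')    # passes
# _assert_has_capability(fake_nnet, 'elu')   # would raise AssertionError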
keras/layers/advanced_activations.py
@@ -107,9 +107,7 @@ class ELU(Layer):
         super(ELU, self).__init__(**kwargs)
 
     def call(self, x, mask=None):
-        pos = K.relu(x)
-        neg = (x - abs(x)) * 0.5
-        return pos + self.alpha * (K.exp(neg) - 1.)
+        return K.elu(x, self.alpha)
 
     def get_config(self):
         config = {'alpha': float(self.alpha)}
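The removed lines computed ELU by hand: pos is the positive part, neg is min(x, 0), and pos + alpha * (exp(neg) - 1) reduces to x for positive inputs and alpha * (exp(x) - 1) otherwise, which is exactly what the native op returns. A NumPy sketch checking that the two formulations agree (illustrative only):

import numpy as np

def elu_manual(x, alpha):
    pos = np.maximum(x, 0.)          # K.relu(x)
    neg = (x - np.abs(x)) * 0.5      # min(x, 0)
    return pos + alpha * (np.exp(neg) - 1.)

def elu_native(x, alpha):
    return np.where(x > 0, x, alpha * np.expm1(x))

x = np.linspace(-3., 3., 13)
assert np.allclose(elu_manual(x, 0.5), elu_native(x, 0.5))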
tests/keras/backend/test_backends.py
@@ -492,6 +492,7 @@ class TestBackend(object):
         check_single_tensor_operation('relu', (4, 2), alpha=0.1, max_value=0.5)
         check_single_tensor_operation('softmax', (4, 10))
         check_single_tensor_operation('softplus', (4, 10))
+        check_single_tensor_operation('elu', (4, 10), alpha=0.5)
 
         check_single_tensor_operation('sigmoid', (4, 2))
         check_single_tensor_operation('hard_sigmoid', (4, 2))
tests/keras/test_activations.py
@@ -131,6 +131,23 @@ def test_relu():
     assert_allclose(result, test_values, rtol=1e-05)
 
 
+def test_elu():
+    x = K.placeholder(ndim=2)
+    f = K.function([x], [activations.elu(x, 0.5)])
+
+    test_values = get_standard_values()
+    result = f([test_values])[0]
+
+    # because no negatives in test values
+    assert_allclose(result, test_values, rtol=1e-05)
+
+    negative_values = np.array([[-1, -2]], dtype=K.floatx())
+    result = f([negative_values])[0]
+    true_result = (np.exp(negative_values) - 1) / 2
+
+    assert_allclose(result, true_result)
+
+
 def test_tanh():
     test_values = get_standard_values()