diff --git a/keras_core/layers/activations/leaky_relu.py b/keras_core/layers/activations/leaky_relu.py
index 52ff66138..257a82551 100644
--- a/keras_core/layers/activations/leaky_relu.py
+++ b/keras_core/layers/activations/leaky_relu.py
@@ -7,15 +7,17 @@ from keras_core.layers.layer import Layer
 class LeakyReLU(Layer):
     """Leaky version of a Rectified Linear Unit activation layer.
 
-    The layer allows a small gradient when the unit is not active.
+    This layer allows a small gradient when the unit is not active.
 
     Formula:
+
     ``` python
     f(x) = alpha * x if x < 0
     f(x) = x if x >= 0
     ```
 
     Example:
+
     ``` python
     leaky_relu_layer = LeakyReLU(negative_slope=0.5)
     input = np.array([-10, -5, 0.0, 5, 10])
@@ -36,8 +38,8 @@ class LeakyReLU(Layer):
         if negative_slope is None:
             raise ValueError(
                 "The negative_slope value of a Leaky ReLU layer "
-                "cannot be None, Expecting a float. Received "
-                f"negative_slope: {negative_slope}"
+                "cannot be None, Expecting a float. Received: "
+                f"negative_slope={negative_slope}"
             )
         self.supports_masking = True
         self.negative_slope = negative_slope
diff --git a/keras_core/layers/activations/leaky_relu_test.py b/keras_core/layers/activations/leaky_relu_test.py
index 926771201..44eae0505 100644
--- a/keras_core/layers/activations/leaky_relu_test.py
+++ b/keras_core/layers/activations/leaky_relu_test.py
@@ -25,8 +25,7 @@ class LeakyReLUTest(testing.TestCase):
     def test_invalid_usage(self):
         with self.assertRaisesRegex(
             ValueError,
-            "The negative_slope value of a Leaky ReLU layer cannot be None, "
-            "Expecting a float. Received negative_slope: None",
+            "The negative_slope value of a Leaky ReLU layer cannot be None",
         ):
             self.run_layer_test(
                 leaky_relu.LeakyReLU,
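
For context, here is a minimal usage sketch of the behavior the updated docstring describes (assuming `keras_core` and NumPy are installed; the layer call and values are taken from the docstring example, and the `None` check exercises the reworded error message):

``` python
import numpy as np

from keras_core import layers

# Docstring formula: f(x) = negative_slope * x if x < 0, f(x) = x if x >= 0
leaky = layers.LeakyReLU(negative_slope=0.5)
x = np.array([-10.0, -5.0, 0.0, 5.0, 10.0])
print(leaky(x))  # expected values: [-5., -2.5, 0., 5., 10.]

# Passing None should raise with the reworded message,
# e.g. "... cannot be None, Expecting a float. Received: negative_slope=None"
try:
    layers.LeakyReLU(negative_slope=None)
except ValueError as e:
    print(e)
```

The test now matches only the stable prefix of the message ("cannot be None"), so further rewording of the tail of the error string will not require touching the test again.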