Add Activation layer.

This commit is contained in:
Francois Chollet 2023-04-22 19:00:56 -07:00
parent b88084efdb
commit 183f360112
5 changed files with 84 additions and 6 deletions

@ -1,3 +1,4 @@
from keras_core.layers.core.activation import Activation
from keras_core.layers.core.dense import Dense
from keras_core.layers.core.input_layer import Input
from keras_core.layers.core.input_layer import InputLayer

@ -0,0 +1,39 @@
from keras_core import activations
from keras_core.api_export import keras_core_export
from keras_core.layers.layer import Layer
@keras_core_export("keras_core.layers.Activation")
class Activation(Layer):
    """Layer that applies an activation function to its input.

    Args:
        activation: The activation to apply. May be a callable, or the
            string name of a function from the `keras_core.activations`
            namespace.

    Example:

    >>> layer = keras_core.layers.Activation('relu')
    >>> layer([-3.0, -1.0, 0.0, 2.0])
    [0.0, 0.0, 0.0, 2.0]
    >>> layer = keras_core.layers.Activation(keras_core.activations.relu)
    >>> layer([-3.0, -1.0, 0.0, 2.0])
    [0.0, 0.0, 0.0, 2.0]
    """

    def __init__(self, activation, **kwargs):
        super().__init__(**kwargs)
        # Resolve string identifiers (e.g. "relu") to the actual callable.
        self.activation = activations.get(activation)
        # Element-wise op: masks pass through unchanged.
        self.supports_masking = True

    def call(self, inputs):
        return self.activation(inputs)

    def compute_output_shape(self, input_shape):
        # Activations are applied element-wise, so the shape is preserved.
        return input_shape

    def get_config(self):
        base_config = super().get_config()
        base_config.update(
            {"activation": activations.serialize(self.activation)}
        )
        return base_config

@ -0,0 +1,33 @@
from keras_core import activations
from keras_core import layers
from keras_core import testing
class ActivationTest(testing.TestCase):
    def test_activation_basics(self):
        """Smoke-test `layers.Activation` with a string name and a callable.

        NOTE(review): renamed from `test_dense_basics`, which was an
        obvious copy-paste from `DenseTest` and mislabeled this test.
        """
        # Activation resolved from its string name.
        self.run_layer_test(
            layers.Activation,
            init_kwargs={
                "activation": "relu",
            },
            input_shape=(2, 3),
            expected_output_shape=(2, 3),
            expected_num_trainable_weights=0,
            expected_num_non_trainable_weights=0,
            expected_num_seed_generators=0,
            expected_num_losses=0,
            supports_masking=True,
        )
        # Activation passed directly as a callable.
        self.run_layer_test(
            layers.Activation,
            init_kwargs={
                "activation": activations.gelu,
            },
            input_shape=(2, 2),
            expected_output_shape=(2, 2),
            expected_num_trainable_weights=0,
            expected_num_non_trainable_weights=0,
            expected_num_seed_generators=0,
            expected_num_losses=0,
            supports_masking=True,
        )

@ -19,9 +19,9 @@ class Dense(Layer):
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
name=None,
**kwargs,
):
super().__init__(name=name)
super().__init__(**kwargs)
self.units = units
self.activation = activations.get(activation)
self.use_bias = use_bias
@ -57,6 +57,11 @@ class Dense(Layer):
x = x + self.bias
return self.activation(x)
def compute_output_shape(self, input_shape):
    """Return the input shape with the last axis replaced by `self.units`.

    A dense projection only changes the feature (final) dimension; all
    leading dimensions are preserved.
    """
    return (*input_shape[:-1], self.units)
def get_config(self):
base_config = super().get_config()
config = {

@ -1,14 +1,14 @@
import numpy as np
from keras_core import layers
from keras_core import testing
from keras_core.layers.core.dense import Dense
class DenseTest(testing.TestCase):
def test_dense_basics(self):
# 2D case, no bias.
self.run_layer_test(
Dense,
layers.Dense,
init_kwargs={
"units": 4,
"activation": "relu",
@ -26,7 +26,7 @@ class DenseTest(testing.TestCase):
)
# 3D case, some regularizers.
self.run_layer_test(
Dense,
layers.Dense,
init_kwargs={
"units": 5,
"activation": "sigmoid",
@ -43,7 +43,7 @@ class DenseTest(testing.TestCase):
)
def test_dense_correctness(self):
layer = Dense(units=2, activation="relu")
layer = layers.Dense(units=2, activation="relu")
layer.build((1, 2))
layer.set_weights(
[