keras/keras_core/models/sequential.py

from tensorflow import nest

from keras_core import backend
from keras_core.api_export import keras_core_export
from keras_core.layers.core.input_layer import InputLayer
from keras_core.models.functional import Functional
from keras_core.models.model import Model
from keras_core.utils import tracking


@keras_core_export(["keras_core.Sequential", "keras_core.models.Sequential"])
class Sequential(Model):
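    """`Sequential` groups a linear stack of layers into a `Model`.

    Example (an illustrative sketch, not part of the original file; it
    assumes `keras_core.layers.Dense` is available):

    ```python
    model = Sequential(
        [
            keras_core.layers.Dense(8, activation="relu"),
            keras_core.layers.Dense(4),
        ]
    )
    model.add(keras_core.layers.Dense(2))
    len(model.layers)  # -> 3; any auto-generated InputLayer is omitted.
    ```
    """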

    @tracking.no_automatic_dependency_tracking
    def __init__(self, layers=None, trainable=True, name=None):
        super().__init__(trainable=trainable, name=name)
        self._functional = None
        self._layers = []
        if layers:
            for layer in layers:
                self.add(layer, rebuild=False)
            self._maybe_rebuild()

    def add(self, layer, rebuild=True):
        # If we are passed a Keras tensor created by keras.Input(), we
        # extract the input layer from its keras history and use that.
        if hasattr(layer, "_keras_history"):
            origin_layer = layer._keras_history[0]
            if isinstance(origin_layer, InputLayer):
                layer = origin_layer
        if not self._is_layer_name_unique(layer):
            raise ValueError(
                "All layers added to a Sequential model "
                f"should have unique names. Name '{layer.name}' is already "
                "the name of a layer in this model. Update the `name` "
                "argument to pass a unique name."
            )
        if (
            isinstance(layer, InputLayer)
            and self._layers
            and isinstance(self._layers[0], InputLayer)
        ):
            raise ValueError(
                f"Sequential model '{self.name}' has already been "
                "configured to use input shape "
                f"{self._layers[0].batch_shape}. You cannot add "
                "a different Input layer to it."
            )
        self._layers.append(layer)
        if rebuild:
            self._maybe_rebuild()
        else:
            self.built = False
            self._functional = None

    def pop(self, rebuild=True):
        layer = self._layers.pop()
        if rebuild:
            self._maybe_rebuild()
        else:
            self.built = False
            self._functional = None
        return layer

    def _maybe_rebuild(self):
        if isinstance(self._layers[0], InputLayer) and len(self._layers) > 1:
            input_shape = self._layers[0].batch_shape
            self.build(input_shape)
        else:
            self.built = False
            self._functional = None

    def build(self, input_shape=None):
        if not isinstance(input_shape, (tuple, list)):
            # Do not attempt to build if the model does not have a single
            # input tensor.
            return
        if not self._layers:
            raise ValueError(
                f"Sequential model {self.name} cannot be built because it "
                "has no layers. Call `model.add(layer)`."
            )
        if isinstance(self._layers[0], InputLayer):
            if self._layers[0].batch_shape != input_shape:
                raise ValueError(
                    f"Sequential model '{self.name}' has already been "
                    "configured to use input shape "
                    f"{self._layers[0].batch_shape}. You cannot build it "
                    f"with input_shape {input_shape}"
                )
        else:
            self._layers = [InputLayer(batch_shape=input_shape)] + self._layers

        # Build functional model
        inputs = self._layers[0].output
        x = inputs
        for layer in self._layers[1:]:
            x = layer(x)
        outputs = x
        self._functional = Functional(inputs=inputs, outputs=outputs)
        self.built = True
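
    # Note (illustrative, not part of the original file; `Input` and
    # `layers.Dense` are assumed from the `keras_core` namespace): passing
    # an Input as the first element builds the model eagerly,
    #
    #     model = Sequential([Input(shape=(16,)), layers.Dense(4)])
    #     model.built  # -> True, via _maybe_rebuild() -> build((None, 16))
    #
    # whereas without an Input layer, building is deferred until `build()`
    # is called explicitly or `call()` receives a tensor of known shape.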

    def call(self, inputs, training=None, mask=None):
        if self._functional:
            return self._functional(inputs, training=training, mask=mask)

        # Else, check if we can build a Functional model
        if isinstance(inputs, backend.KerasTensor) or backend.is_tensor(
            inputs
        ):
            self.build(inputs.shape)
            return self._functional(inputs, training=training, mask=mask)

        # No functional model can be built -- just apply the layer sequence.
        # This typically happens if `inputs` is a nested struct.
        for layer in self.layers:
            # During each iteration, `inputs` are the inputs to `layer`, and
            # `outputs` are the outputs of `layer` applied to `inputs`. At
            # the end of each iteration `inputs` is set to `outputs` to
            # prepare for the next layer.
            kwargs = {}
            if layer._call_has_mask_arg():
                kwargs["mask"] = mask
            if layer._call_has_training_arg():
                kwargs["training"] = training
            outputs = layer(inputs, **kwargs)
            inputs = outputs

            def _get_mask_from_keras_tensor(kt):
                return getattr(kt, "_keras_mask", None)

            # Propagate the mask produced by this layer to the next one.
            mask = nest.map_structure(_get_mask_from_keras_tensor, outputs)
        return outputs

    @property
    def layers(self):
        # Historically, `sequential.layers` only returns layers that were
        # added via `add`, and omits the auto-generated `InputLayer` that
        # comes at the bottom of the stack.
        layers = self._layers
        if layers and isinstance(layers[0], InputLayer):
            return layers[1:]
        return layers[:]

    def compute_output_spec(self, inputs, training=None, mask=None):
        if self._functional:
            return self._functional.compute_output_spec(
                inputs, training=training, mask=mask
            )
        # Direct application
        for layer in self.layers:
            outputs = layer.compute_output_spec(
                inputs, training=training
            )  # Ignore mask
            inputs = outputs
        return outputs

    def _is_layer_name_unique(self, layer):
        for ref_layer in self._layers:
            if layer.name == ref_layer.name and ref_layer is not layer:
                return False
        return True

    def get_config(self):
        raise NotImplementedError

    @classmethod
    def from_config(cls, config):
        raise NotImplementedError
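
# Usage sketch (illustrative, not part of the original module; `layers.Dense`
# and the shapes below are assumptions chosen for demonstration):
#
#     from keras_core import layers
#
#     model = Sequential(name="mlp")
#     model.add(layers.Dense(8, activation="relu"))
#     model.add(layers.Dense(1))
#     model.build((None, 16))  # prepends an InputLayer and wires the stack
#     model.built              # -> True
#     model.pop()              # drops the last Dense and rebuilds the stack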