2023-04-25 01:46:03 +00:00
|
|
|
import copy
|
|
|
|
|
2023-04-13 00:12:57 +00:00
|
|
|
from tensorflow import nest
|
|
|
|
|
2023-04-09 19:53:37 +00:00
|
|
|
from keras_core.api_export import keras_core_export
|
2023-04-13 00:12:57 +00:00
|
|
|
from keras_core.layers.core.input_layer import InputLayer
|
|
|
|
from keras_core.models.functional import Functional
|
2023-04-09 19:53:37 +00:00
|
|
|
from keras_core.models.model import Model
|
2023-04-25 01:46:03 +00:00
|
|
|
from keras_core.saving import serialization_lib
|
2023-04-13 00:12:57 +00:00
|
|
|
from keras_core.utils import tracking
|
2023-04-09 19:35:32 +00:00
|
|
|
|
|
|
|
|
2023-04-09 19:53:37 +00:00
|
|
|
@keras_core_export(["keras_core.Sequential", "keras_core.models.Sequential"])
class Sequential(Model):
    """`Sequential` groups a linear stack of layers into a `Model`.

    Layers are applied in the order in which they were added. As soon as
    the input shape is known (via a leading `InputLayer` or an explicit
    `build()` call), an internal `Functional` model is constructed and
    used for computation; otherwise `call()` falls back to applying the
    layers one by one.
    """

    @tracking.no_automatic_dependency_tracking
    def __init__(self, layers=None, trainable=True, name=None):
        super().__init__(trainable=trainable, name=name)
        # Internal Functional model, built lazily once the input shape
        # is known. `None` means "not built yet" / "stale".
        self._functional = None
        self._layers = []
        if layers:
            for layer in layers:
                # Defer rebuilding until all layers have been added.
                self.add(layer, rebuild=False)
            self._maybe_rebuild()

    def add(self, layer, rebuild=True):
        """Adds a layer instance on top of the layer stack.

        Args:
            layer: Layer instance, or a Keras tensor created by
                `keras.Input()` (in which case the originating
                `InputLayer` is added instead).
            rebuild: Whether to attempt rebuilding the internal
                functional model after the layer is appended.

        Raises:
            ValueError: If the layer's name duplicates that of an
                existing layer, or if a second `InputLayer` is added.
        """
        # If we are passed a Keras tensor created by keras.Input(), we
        # extract the input layer from its keras history and use that.
        if hasattr(layer, "_keras_history"):
            origin_layer = layer._keras_history[0]
            if isinstance(origin_layer, InputLayer):
                layer = origin_layer
        if not self._is_layer_name_unique(layer):
            raise ValueError(
                "All layers added to a Sequential model "
                f"should have unique names. Name '{layer.name}' is already "
                "the name of a layer in this model. Update the `name` argument "
                "to pass a unique name."
            )
        if (
            isinstance(layer, InputLayer)
            and self._layers
            and isinstance(self._layers[0], InputLayer)
        ):
            raise ValueError(
                f"Sequential model '{self.name}' has already been configured to "
                f"use input shape {self._layers[0].batch_shape}. You cannot add "
                f"a different Input layer to it."
            )

        self._layers.append(layer)
        if rebuild:
            self._maybe_rebuild()
        else:
            # Mark the model stale; it will be rebuilt on demand.
            self.built = False
            self._functional = None

    def pop(self, rebuild=True):
        """Removes the last layer in the model and returns it.

        Args:
            rebuild: Whether to attempt rebuilding the internal
                functional model after the layer is removed.

        Returns:
            The removed layer instance.
        """
        layer = self._layers.pop()
        if rebuild:
            self._maybe_rebuild()
        else:
            self.built = False
            self._functional = None
        return layer

    def _maybe_rebuild(self):
        # Rebuild the internal functional model only when the input
        # shape is known (first layer is an InputLayer) and there is at
        # least one more layer to connect; otherwise mark the model as
        # unbuilt so `build()` can run later.
        if isinstance(self._layers[0], InputLayer) and len(self._layers) > 1:
            input_shape = self._layers[0].batch_shape
            self.build(input_shape)
        else:
            self.built = False
            self._functional = None

    def build(self, input_shape=None):
        """Builds the model and its internal functional graph for the
        given batch input shape.

        Silently returns without building when `input_shape` does not
        describe a single flat input tensor (e.g. nested structures),
        or when any layer's shape inference is not implemented.

        Args:
            input_shape: Batch shape tuple/list, e.g. `(None, 4)`.

        Raises:
            ValueError: If the model has no layers, or if it was
                already configured with a conflicting input shape.
        """
        if not isinstance(input_shape, (tuple, list)):
            # Do not attempt to build if the model does not have a single
            # input tensor.
            return
        if input_shape and not (
            isinstance(input_shape[0], int) or input_shape[0] is None
        ):
            # First entry is not a dimension: the shape describes a nested
            # structure rather than a single input tensor. Do not build.
            return
        if not self._layers:
            raise ValueError(
                f"Sequential model {self.name} cannot be built because it has no layers. "
                "Call `model.add(layer)`."
            )
        if isinstance(self._layers[0], InputLayer):
            if self._layers[0].batch_shape != input_shape:
                raise ValueError(
                    f"Sequential model '{self.name}' has already been configured to "
                    f"use input shape {self._layers[0].batch_shape}. You cannot build it "
                    f"with input_shape {input_shape}"
                )
        else:
            # Prepend an InputLayer so the graph has an explicit entry point.
            self._layers = [InputLayer(batch_shape=input_shape)] + self._layers

        # Build functional model
        inputs = self._layers[0].output
        x = inputs
        for layer in self._layers[1:]:
            try:
                x = layer(x)
            except NotImplementedError:
                # Can happen if shape inference is not implemented.
                # TODO: consider reverting inbound nodes on layers processed so far.
                return
        outputs = x
        self._functional = Functional(inputs=inputs, outputs=outputs)
        self.built = True

    def call(self, inputs, training=None, mask=None):
        if self._functional:
            return self._functional(inputs, training=training, mask=mask)

        # Fallback: Just apply the layer sequence.
        # This typically happens if `inputs` is a nested struct.
        for layer in self.layers:
            # During each iteration, `inputs` are the inputs to `layer`, and
            # `outputs` are the outputs of `layer` applied to `inputs`. At the
            # end of each iteration `inputs` is set to `outputs` to prepare for
            # the next layer.
            kwargs = {}
            if layer._call_has_mask_arg():
                kwargs["mask"] = mask
            if layer._call_has_training_arg():
                kwargs["training"] = training
            outputs = layer(inputs, **kwargs)
            inputs = outputs

            def _get_mask_from_keras_tensor(kt):
                return getattr(kt, "_keras_mask", None)

            # Propagate the computed mask(s) to the next layer.
            mask = nest.map_structure(_get_mask_from_keras_tensor, outputs)
        return outputs

    @property
    def layers(self):
        # Historically, `sequential.layers` only returns layers that were added
        # via `add`, and omits the auto-generated `InputLayer` that comes at the
        # bottom of the stack.
        layers = self._layers
        if layers and isinstance(layers[0], InputLayer):
            return layers[1:]
        return layers[:]

    def compute_output_spec(self, inputs, training=None, mask=None):
        if self._functional:
            return self._functional.compute_output_spec(
                inputs, training=training, mask=mask
            )
        # Direct application
        for layer in self.layers:
            outputs = layer.compute_output_spec(
                inputs, training=training
            )  # Ignore mask
            inputs = outputs
        return outputs

    def _is_layer_name_unique(self, layer):
        # A layer may be re-added (identity check) without tripping the
        # uniqueness constraint.
        for ref_layer in self._layers:
            if layer.name == ref_layer.name and ref_layer is not layer:
                return False
        return True

    def get_config(self):
        layer_configs = []
        for layer in super().layers:
            # `super().layers` include the InputLayer if available (it is
            # filtered out of `self.layers`).
            layer_configs.append(
                serialization_lib.serialize_keras_object(layer)
            )
        config = Model.get_config(self)
        config["name"] = self.name
        config["layers"] = copy.deepcopy(layer_configs)
        if self._functional is not None:
            config["build_input_shape"] = self._layers[0].batch_shape
        return config

    @classmethod
    def from_config(cls, config, custom_objects=None):
        """Creates a `Sequential` model from its config.

        Accepts either a full config dict (as produced by
        `get_config()`) or legacy-style plain list of layer configs.
        """
        if "name" in config:
            name = config["name"]
            build_input_shape = config.get("build_input_shape")
            layer_configs = config["layers"]
        else:
            name = None
            # Fix: previously undefined in this branch, causing a
            # NameError below when `config` is a plain list of layer
            # configs.
            build_input_shape = None
            layer_configs = config
        model = cls(name=name)
        for layer_config in layer_configs:
            layer = serialization_lib.deserialize_keras_object(
                layer_config,
                custom_objects=custom_objects,
            )
            model.add(layer)
        if (
            not model._functional
            and build_input_shape
            and isinstance(build_input_shape, (tuple, list))
        ):
            model.build(build_input_shape)
        return model
|