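"""Demo: backend-agnostic custom layers and models with Keras Core.

The script below stores layer state in `backend.Variable` objects (or
creates it with `add_weight()`), computes with `keras_core.operations`,
and trains through the standard `compile()`/`fit()` workflow. Because
only Keras APIs are used, the same code runs on any backend selected via
the `KERAS_BACKEND` environment variable (e.g. "tensorflow", "jax", or
"torch").
"""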
import numpy as np
import keras_core
from keras_core import Model
from keras_core import backend
from keras_core import initializers
from keras_core import layers
from keras_core import losses
from keras_core import metrics
from keras_core import operations as ops
from keras_core import optimizers


class MyDense(layers.Layer):
    def __init__(self, units, name=None):
        super().__init__(name=name)
        self.units = units

    def build(self, input_shape):
        input_dim = input_shape[-1]
        w_shape = (input_dim, self.units)
        w_value = initializers.GlorotUniform()(w_shape)
        # State must be stored in `backend.Variable` objects.
        self.w = backend.Variable(w_value, name="kernel", trainable=True)

        # Alternatively, `add_weight()` creates and tracks the variable
        # in a single step.
        self.b = self.add_weight(
            shape=(self.units,),
            initializer="zeros",
            name="bias",
            trainable=True,
        )

    def call(self, inputs):
        # Use Keras ops to create backend-agnostic layers/metrics/etc.
        return ops.matmul(inputs, self.w) + self.b
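
# Quick standalone check of the layer (a minimal sketch; the shapes are
# illustrative). The first call builds the weights lazily from the input
# shape, and both the `backend.Variable` kernel and the `add_weight`
# bias are tracked on the layer.
dense_check = MyDense(units=4)
assert dense_check(np.ones((2, 3))).shape == (2, 4)
assert len(dense_check.weights) == 2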


class MyDropout(layers.Layer):
    def __init__(self, rate, name=None):
        super().__init__(name=name)
        self.rate = rate
        # Use seed_generator for managing RNG state.
        # It is a state element and its seed variable is
        # tracked as part of `layer.variables`.
        self.seed_generator = keras_core.random.SeedGenerator(1337)

    def call(self, inputs, training=False):
        # Use `keras_core.random` for random ops, and only drop units
        # during training.
        if training:
            return keras_core.random.dropout(
                inputs, self.rate, seed=self.seed_generator
            )
        return inputs
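
# The RNG state is tracked like any other variable (a minimal sketch):
# a fresh instance already exposes the SeedGenerator's seed state
# through `variables`, before the layer is ever called.
drop_check = MyDropout(rate=0.5)
assert len(drop_check.variables) == 1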


class MyModel(Model):
    def __init__(self, hidden_dim, output_dim):
        super().__init__()
        self.dense1 = MyDense(hidden_dim)
        self.dense2 = MyDense(hidden_dim)
        self.dense3 = MyDense(output_dim)
        self.dp = MyDropout(0.5)

    def call(self, x, training=False):
        x1 = self.dense1(x)
        x2 = self.dense2(x)
        # Keras ops are just as usable at the model level.
        x = ops.concatenate([x1, x2], axis=-1)
        x = self.dp(x, training=training)
        return self.dense3(x)


model = MyModel(hidden_dim=256, output_dim=16)

x = np.random.random((50000, 128))
y = np.random.random((50000, 16))
batch_size = 32
epochs = 5

model.compile(
    optimizer=optimizers.SGD(learning_rate=0.001),
    loss=losses.MeanSquaredError(),
    metrics=[metrics.MeanSquaredError()],
)
history = model.fit(x, y, batch_size=batch_size, epochs=epochs)
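
# After training, the same backend-agnostic model supports the usual
# inference entry points (a minimal sketch; the slice size is arbitrary).
preds = model.predict(x[:8])
print("Sample predictions shape:", preds.shape)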

model.summary()

print("History:")
print(history.history)