```python
import numpy as np
import tensorflow as tf
from tensorflow import keras


class CustomLayer(keras.layers.Layer):
    def __init__(self, units=32, **kwargs):
        super(CustomLayer, self).__init__(**kwargs)
        self.units = units

    def build(self, input_shape):
        self.w = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(self.units,), initializer="random_normal", trainable=True
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b

    def get_config(self):
        config = super(CustomLayer, self).get_config()
        config.update({"units": self.units})
        return config


def custom_activation(x):
    return tf.nn.tanh(x) ** 2


# Make a model with the CustomLayer and custom_activation
inputs = keras.Input((32,))
x = CustomLayer(32)(inputs)
outputs = keras.layers.Activation(custom_activation)(x)
model = keras.Model(inputs, outputs)

# Retrieve the config
config = model.get_config()

# At loading time, register the custom objects with a `custom_object_scope`:
custom_objects = {"CustomLayer": CustomLayer, "custom_activation": custom_activation}
with keras.utils.custom_object_scope(custom_objects):
    new_model = keras.Model.from_config(config)
```
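As a quick sanity check, you can confirm that the rebuilt model shares the architecture but not the weight values. The snippet below is a minimal sketch of that check, not part of the example above:

```python
# Sanity check (minimal sketch): `new_model` has the same architecture,
# but its weights were freshly re-initialized, so outputs generally differ.
x = tf.random.normal((4, 32))
assert not np.allclose(model(x).numpy(), new_model(x).numpy())
```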
## In-memory model cloning

You can also do in-memory cloning of a model via `tf.keras.models.clone_model()`. This is equivalent to getting the config, then recreating the model from its config (so it does not preserve compilation information or layer weight values).
Example:

```python
with keras.utils.custom_object_scope(custom_objects):
    new_model = keras.models.clone_model(model)
```
## Saving & loading only the model's weight values

You can choose to only save & load a model's weights. This can be useful if:

- You only need the model for inference: in this case you won't need to restart training, so you don't need the compilation information or optimizer state.
- You are doing transfer learning: in this case you will be training a new model reusing the state of a prior model, so you don't need the compilation information of the prior model.
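For example, a weights-only save-and-reload round trip can look like the sketch below. The checkpoint path `weights_ckpt` is an arbitrary name chosen for illustration; it reuses `model`, `config`, and `custom_objects` from the earlier example:

```python
# Minimal sketch of a weights-only round trip; "weights_ckpt" is an
# illustrative path (saved in the TensorFlow checkpoint format).
model.save_weights("weights_ckpt")

# Rebuild a model with the same architecture, then restore the weights.
with keras.utils.custom_object_scope(custom_objects):
    restored_model = keras.Model.from_config(config)
restored_model.load_weights("weights_ckpt")
```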
### APIs for in-memory weight transfer

Weights can be copied between different objects by using `get_weights()` and `set_weights()`:

- `tf.keras.layers.Layer.get_weights()`: Returns a list of NumPy arrays.
- `tf.keras.layers.Layer.set_weights()`: Sets the layer's weights to the values given in the `weights` argument (a list of NumPy arrays).
Examples below.
**Transferring weights from one layer to another, in memory**
```python
def create_layer():
    layer = keras.layers.Dense(64, activation="relu", name="dense_2")
    layer.build((None, 784))
    return layer


layer_1 = create_layer()
layer_2 = create_layer()

# Copy weights from layer 1 to layer 2
layer_2.set_weights(layer_1.get_weights())
```
**Transferring weights from one model to another model with a compatible architecture, in memory**
```python
# Create a simple functional model
inputs = keras.Input(shape=(784,), name="digits")
x = keras.layers.Dense(64, activation="relu", name="dense_1")(inputs)
x = keras.layers.Dense(64, activation="relu", name="dense_2")(x)
outputs = keras.layers.Dense(10, name="predictions")(x)
functional_model = keras.Model(inputs=inputs, outputs=outputs, name="3_layer_mlp")


# Define a subclassed model with the same architecture
class SubclassedModel(keras.Model):
    def __init__(self, output_dim, name=None):
        super(SubclassedModel, self).__init__(name=name)
        self.output_dim = output_dim
        self.dense_1 = keras.layers.Dense(64, activation="relu", name="dense_1")
        self.dense_2 = keras.layers.Dense(64, activation="relu", name="dense_2")
        self.dense_3 = keras.layers.Dense(output_dim, name="predictions")

    def call(self, inputs):
        x = self.dense_1(inputs)
        x = self.dense_2(x)
        x = self.dense_3(x)
        return x

    def get_config(self):
        return {"output_dim": self.output_dim, "name": self.name}


subclassed_model = SubclassedModel(10)
# Call the subclassed model once to create the weights.
subclassed_model(tf.ones((1, 784)))

# Copy weights from functional_model to subclassed_model.
subclassed_model.set_weights(functional_model.get_weights())

assert len(functional_model.weights) == len(subclassed_model.weights)
for a, b in zip(functional_model.weights, subclassed_model.weights):
    np.testing.assert_allclose(a.numpy(), b.numpy())
```
**The case of stateless layers**
Because stateless layers do not change the order or number of weights, models can have compatible architectures even if there are extra/missing stateless layers.
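For instance, the sketch below adds a `Dropout` layer (which has no weights) to the functional model defined above; the resulting model is still weight-compatible with the original, so the weight transfer succeeds:

```python
# Dropout is stateless, so this model remains weight-compatible
# with `functional_model` above.
inputs = keras.Input(shape=(784,), name="digits")
x = keras.layers.Dense(64, activation="relu", name="dense_1")(inputs)
x = keras.layers.Dense(64, activation="relu", name="dense_2")(x)
x = keras.layers.Dropout(0.5)(x)
outputs = keras.layers.Dense(10, name="predictions")(x)
functional_model_with_dropout = keras.Model(
    inputs=inputs, outputs=outputs, name="3_layer_mlp"
)

functional_model_with_dropout.set_weights(functional_model.get_weights())
```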