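This example builds on the ActivityRegularizationLayer introduced earlier in the guide. For reference, here is a minimal sketch of such a layer (an illustrative reconstruction; the exact rate used earlier may differ):

import tensorflow as tf
from tensorflow import keras


class ActivityRegularizationLayer(keras.layers.Layer):
    """Adds a penalty proportional to the sum of its inputs via add_loss()."""

    def __init__(self, rate=1e-2):
        super(ActivityRegularizationLayer, self).__init__()
        self.rate = rate

    def call(self, inputs):
        # Register an input-dependent loss during the forward pass;
        # the inputs pass through unchanged.
        self.add_loss(self.rate * tf.reduce_sum(inputs))
        return inputs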
import numpy as np

inputs = keras.Input(shape=(3,))
outputs = ActivityRegularizationLayer()(inputs)
model = keras.Model(inputs, outputs)

# If there is a loss passed in `compile`, the regularization
# losses get added to it.
model.compile(optimizer="adam", loss="mse")
model.fit(np.random.random((2, 3)), np.random.random((2, 3)))

# It's also possible not to pass any loss in `compile`,
# since the model already has a loss to minimize, via the `add_loss`
# call during the forward pass!
model.compile(optimizer="adam")
model.fit(np.random.random((2, 3)), np.random.random((2, 3)))

1/1 [==============================] - 0s 1ms/step - loss: 0.1555
1/1 [==============================] - 0s 927us/step - loss: 0.0336
<tensorflow.python.keras.callbacks.History at 0x145bca6d0>
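Because the loss is created during the forward pass, you can also inspect it directly via the standard model.losses property; a quick check, reusing the model above:

# Each forward pass repopulates the list of call-time losses.
_ = model(np.random.random((2, 3)))
print(model.losses)  # a list holding one scalar regularization-loss tensor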
The add_metric() method
Similar to add_loss(), layers also have an add_metric() method for tracking the moving average of a quantity during training.

Consider the following "logistic endpoint" layer. It takes predictions and targets as inputs, computes a loss that it tracks via add_loss(), and computes an accuracy scalar that it tracks via add_metric().
class LogisticEndpoint(keras.layers.Layer):
    def __init__(self, name=None):
        super(LogisticEndpoint, self).__init__(name=name)
        self.loss_fn = keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = keras.metrics.BinaryAccuracy()

    def call(self, targets, logits, sample_weights=None):
        # Compute the training-time loss value and add it
        # to the layer using `self.add_loss()`.
        loss = self.loss_fn(targets, logits, sample_weights)
        self.add_loss(loss)

        # Log accuracy as a metric and add it
        # to the layer using `self.add_metric()`.
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")

        # Return the inference-time prediction tensor (for `.predict()`).
        # Sigmoid matches the `from_logits=True` binary crossentropy above.
        return tf.nn.sigmoid(logits)
Metrics tracked in this way are accessible via layer.metrics:
layer = LogisticEndpoint()

targets = tf.ones((2, 2))
logits = tf.ones((2, 2))
y = layer(targets, logits)

print("layer.metrics:", layer.metrics)
print("current accuracy value:", float(layer.metrics[0].result()))

layer.metrics: [<tensorflow.python.keras.metrics.BinaryAccuracy object at 0x145bccdd0>]
current accuracy value: 1.0
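Note that the underlying BinaryAccuracy object keeps accumulating state across calls. To start a fresh measurement window, you can clear it with the metric's standard reset_states() method:

# Clear the metric's accumulated state; `result()` then reads 0.0 again.
layer.metrics[0].reset_states()
print("current accuracy value:", float(layer.metrics[0].result()))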
Just like for add_loss(), these metrics are tracked by fit():
inputs = keras.Input(shape=(3,), name="inputs")
targets = keras.Input(shape=(10,), name="targets")
logits = keras.layers.Dense(10)(inputs)
# Pass `targets` first, then `logits`, matching `call(self, targets, logits)`.
predictions = LogisticEndpoint(name="predictions")(targets, logits)

model = keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")

data = {
    "inputs": np.random.random((3, 3)),
    "targets": np.random.random((3, 10)),
}
model.fit(data)

1/1 [==============================] - 0s 999us/step - loss: 1.0366 - binary_accuracy: 0.0000e+00
<tensorflow.python.keras.callbacks.History at 0x1452c7650>
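The value returned by call() is what comes back at inference time. As a quick sketch, predict() on the same data dict returns the sigmoid predictions, unaffected by the tracked loss and metric:

preds = model.predict(data)
print(preds.shape)  # (3, 10)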
You can optionally enable serialization on your layers
If you need your custom layers to be serializable as part of a Functional model, you can optionally implement a get_config() method:
class Linear(keras.layers.Layer):
    def __init__(self, units=32):
        super(Linear, self).__init__()
        self.units = units

    def build(self, input_shape):
        self.w = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(self.units,), initializer="random_normal", trainable=True
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b

    def get_config(self):
        return {"units": self.units}
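You can then re-create the layer from its config with from_config(), whose default implementation simply calls cls(**config):

layer = Linear(units=64)
config = layer.get_config()
print(config)  # {'units': 64}

# Build an equivalent, freshly initialized layer from the config.
new_layer = Linear.from_config(config)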