text
stringlengths 0
4.99k
|
---|
# NOTE(review): this is a fragment of a callback method (presumably
# CustomLearningRateScheduler.on_epoch_begin) — the enclosing `def` line is
# outside this chunk; `|` lines are dataset-extraction noise. TODO confirm.
# Call schedule function to get the scheduled learning rate.
|
scheduled_lr = self.schedule(epoch, lr)
|
# Set the value back to the optimizer before this epoch starts
|
tf.keras.backend.set_value(self.model.optimizer.lr, scheduled_lr)
|
# Announce the effective learning rate once at the start of each epoch.
print("\nEpoch %05d: Learning rate is %6.4f." % (epoch, scheduled_lr))
|
# Step schedule for the optimizer's learning rate: each entry is a
# (start_epoch, learning_rate) pair — the rate takes effect at that epoch.
LR_SCHEDULE = [
    (3, 0.05),
    (6, 0.01),
    (9, 0.005),
    (12, 0.001),
]
|
def lr_schedule(epoch, lr):
    """Return the learning rate scheduled for `epoch`.

    Looks `epoch` up in the module-level ``LR_SCHEDULE`` list of
    ``(start_epoch, learning_rate)`` tuples. When `epoch` is exactly one
    of the scheduled start epochs, the corresponding new rate is
    returned; otherwise the current rate is kept unchanged.

    Args:
        epoch: Current epoch index (0-based).
        lr: The optimizer's current learning rate.

    Returns:
        The learning rate to use for this epoch (float).
    """
    # Before the first or after the last scheduled epoch, leave the
    # rate untouched.
    if epoch < LR_SCHEDULE[0][0] or epoch > LR_SCHEDULE[-1][0]:
        return lr
    # Iterate the (start_epoch, rate) pairs directly instead of
    # indexing with range(len(...)).
    for start_epoch, new_lr in LR_SCHEDULE:
        if epoch == start_epoch:
            return new_lr
    # Epoch falls between scheduled boundaries: keep the current rate.
    return lr
|
# Build a fresh model and train it with both custom callbacks attached:
# per-batch loss printing plus the epoch-wise LR scheduler driven by
# `lr_schedule`. verbose=0 silences Keras's own progress output so only
# the callbacks' prints appear.
model = get_model()
training_callbacks = [
    LossAndErrorPrintingCallback(),
    CustomLearningRateScheduler(lr_schedule),
]
model.fit(
    x_train,
    y_train,
    batch_size=64,
    steps_per_epoch=5,
    epochs=15,
    verbose=0,
    callbacks=training_callbacks,
)
|
Epoch 00000: Learning rate is 0.1000.
|
For batch 0, loss is 32.53.
|
For batch 1, loss is 430.35.
|
For batch 2, loss is 294.47.
|
For batch 3, loss is 223.69.
|
For batch 4, loss is 180.61.
|
The average loss for epoch 0 is 180.61 and mean absolute error is 8.20.
|
Epoch 00001: Learning rate is 0.1000.
|
For batch 0, loss is 6.72.
|
For batch 1, loss is 5.57.
|
For batch 2, loss is 5.33.
|
For batch 3, loss is 5.35.
|
For batch 4, loss is 5.53.
|
The average loss for epoch 1 is 5.53 and mean absolute error is 1.92.
|
Epoch 00002: Learning rate is 0.1000.
|
For batch 0, loss is 5.22.
|
For batch 1, loss is 5.19.
|
For batch 2, loss is 5.51.
|
For batch 3, loss is 5.80.
|
For batch 4, loss is 5.69.
|
The average loss for epoch 2 is 5.69 and mean absolute error is 1.99.
|
Epoch 00003: Learning rate is 0.0500.
|
For batch 0, loss is 6.21.
|
For batch 1, loss is 4.85.
|
For batch 2, loss is 4.90.
|
For batch 3, loss is 4.66.
|
For batch 4, loss is 4.54.
|
The average loss for epoch 3 is 4.54 and mean absolute error is 1.69.
|
Epoch 00004: Learning rate is 0.0500.
|
For batch 0, loss is 3.62.
|
For batch 1, loss is 3.58.
|
For batch 2, loss is 3.92.
|
For batch 3, loss is 3.73.
|
For batch 4, loss is 3.65.
|
The average loss for epoch 4 is 3.65 and mean absolute error is 1.57.
|
Epoch 00005: Learning rate is 0.0500.
|
For batch 0, loss is 4.42.
|
For batch 1, loss is 4.95.
|
For batch 2, loss is 5.83.
|
For batch 3, loss is 6.36.
|
For batch 4, loss is 6.62.
|
The average loss for epoch 5 is 6.62 and mean absolute error is 2.09.
|
Epoch 00006: Learning rate is 0.0100.
|
For batch 0, loss is 8.74.
|
For batch 1, loss is 7.34.
|
For batch 2, loss is 5.55.
|
For batch 3, loss is 4.98.
|
For batch 4, loss is 4.48.
|
The average loss for epoch 6 is 4.48 and mean absolute error is 1.65.
|
Epoch 00007: Learning rate is 0.0100.
|
For batch 0, loss is 4.30.
|
For batch 1, loss is 4.01.
|
For batch 2, loss is 3.97.
|
For batch 3, loss is 3.68.
|
For batch 4, loss is 3.76.
|
The average loss for epoch 7 is 3.76 and mean absolute error is 1.51.
|
Epoch 00008: Learning rate is 0.0100.
|
For batch 0, loss is 3.41.
|
For batch 1, loss is 3.74.
|
For batch 2, loss is 3.51.
|
For batch 3, loss is 3.52.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.