Forked from erenon/train_on_batch_with_tensorboard.py
Created
December 25, 2018 04:54
-
-
Save binshengliu/b9972da2b93c60d98b4650a77e54e26f to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# This example shows how to use keras TensorBoard callback
# with model.train_on_batch, which does NOT invoke callbacks on its
# own — we drive the callback by hand further below.
import tensorflow.keras as keras

# Setup the model
model = keras.models.Sequential()
model.add(...)  # Add your layers (placeholder: replace `...` with real layers)
model.compile(...)  # Compile as usual (placeholder: loss/optimizer/metrics)

batch_size=256

# Create the TensorBoard callback,
# which we will drive manually
# NOTE(review): `batch_size` and `write_grads` are TF1.x-era arguments;
# they were removed from keras.callbacks.TensorBoard in TF2 — confirm
# the target TensorFlow version before reusing this snippet.
tensorboard = keras.callbacks.TensorBoard(
    log_dir='/tmp/my_tf_logs',
    histogram_freq=0,
    batch_size=batch_size,
    write_graph=True,
    write_grads=True
)
# Attach the model so the callback knows what to log (normally done by fit()).
tensorboard.set_model(model)
# Transform train_on_batch return value
# to dict expected by on_batch_end callback
def named_logs(model, logs):
    """Pair each of the model's metric names with its value from *logs*.

    ``model.train_on_batch`` returns a plain sequence of metric values;
    the TensorBoard callback hooks expect a ``{metric_name: value}``
    mapping instead, so zip the two together.
    """
    return dict(zip(model.metrics_names, logs))
# Run training batches, notify tensorboard at the end of each epoch.
# Each batch is reported via on_epoch_end so every batch gets its own
# data point on the TensorBoard charts.
for batch_id in range(1000):
    # NOTE(review): create_training_data is not defined in this file —
    # presumably supplied by the surrounding project; it must return one
    # (x, y) batch per call.
    x_train,y_train = create_training_data(batch_size)
    logs = model.train_on_batch(x_train, y_train)
    # Drive the callback manually, using the batch index as the "epoch".
    tensorboard.on_epoch_end(batch_id, named_logs(model, logs))
# Let the callback flush and close its log writer.
tensorboard.on_train_end(None)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment