This repository has been archived by the owner on Nov 17, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 6.8k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add tensorboard support in Speedometer. (#5345)
* Add tensorboard support in Speedometer. * fix pylint. * Add tensorboard_callback. * Refactor. * fix lint.
- Loading branch information
1 parent
b276a9d
commit 1550f17
Showing
2 changed files
with
58 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -6,3 +6,5 @@ | |
|
||
from . import symbol as sym | ||
from . import ndarray as nd | ||
|
||
from . import tensorboard |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
# coding: utf-8 | ||
"""TensorBoard functions that can be used to log various status during epoch.""" | ||
from __future__ import absolute_import | ||
|
||
import logging | ||
|
||
|
||
class LogMetricsCallback(object):
    """Log metrics periodically in TensorBoard.

    This callback works almost the same as `callback.Speedometer`, but writes a
    TensorBoard event file for visualization. For more usage, please refer to
    https://github.com/dmlc/tensorboard

    Parameters
    ----------
    logging_dir : str
        TensorBoard event file directory.
        After that, use `tensorboard --logdir=path/to/logs` to launch TensorBoard visualization.
    prefix : str
        Prefix for a metric name of `scalar` value.
        You might want to use this param to leverage the TensorBoard plot feature,
        where TensorBoard plots different curves in one graph when they have the
        same `name`. The following example shows the usage (how to compare a train
        and eval metric in the same graph).

    Raises
    ------
    ImportError
        If the `tensorboard` package is not installed.

    Examples
    --------
    >>> # log train and eval metrics under different directories.
    >>> training_log = 'logs/train'
    >>> evaluation_log = 'logs/eval'
    >>> # in this case, each training and evaluation metric pair has the same name,
    >>> # you can add a prefix to make them separate.
    >>> batch_end_callbacks = [mx.tensorboard.LogMetricsCallback(training_log)]
    >>> eval_end_callbacks = [mx.tensorboard.LogMetricsCallback(evaluation_log)]
    >>> # run
    >>> model.fit(train,
    >>>     ...
    >>>     batch_end_callback = batch_end_callbacks,
    >>>     eval_end_callback = eval_end_callbacks)
    >>> # Then use `tensorboard --logdir=logs/` to launch TensorBoard visualization.
    """
    def __init__(self, logging_dir, prefix=None):
        self.prefix = prefix
        try:
            # Imported lazily so this module stays importable without tensorboard.
            from tensorboard import SummaryWriter
            self.summary_writer = SummaryWriter(logging_dir)
        except ImportError:
            # Fail fast: the original code swallowed this error, leaving the
            # instance half-constructed so every later __call__ crashed with a
            # confusing AttributeError on `self.summary_writer`.
            logging.error('You can install tensorboard via `pip install tensorboard`.')
            raise

    def __call__(self, param):
        """Callback to log training speed and metrics in TensorBoard.

        Parameters
        ----------
        param : object
            A `BatchEndParam`-like object exposing an `eval_metric` attribute
            with a `get_name_value()` method returning (name, value) pairs.
            If `eval_metric` is None, nothing is logged.
        """
        if param.eval_metric is None:
            return
        name_value = param.eval_metric.get_name_value()
        for name, value in name_value:
            if self.prefix is not None:
                name = '%s-%s' % (self.prefix, name)
            self.summary_writer.add_scalar(name, value)