Skip to content

Commit

Permalink
Merge pull request #1954 from borisdayma/feat_wandb
Browse files Browse the repository at this point in the history
feat: add logging through W&B
  • Loading branch information
Bharath Ramsundar committed Jul 8, 2020
2 parents af2db87 + fd17415 commit eeec96f
Show file tree
Hide file tree
Showing 4 changed files with 53 additions and 0 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ DeepChem has a number of "soft" requirements. These are packages which are neede
- [RDKit](http://www.rdkit.org/docs/Install.html)
- [simdna](https://github.com/kundajelab/simdna)
- [XGBoost](https://xgboost.readthedocs.io/en/latest/)
- [Weights & Biases](https://docs.wandb.com/)
- [Tensorflow Probability](https://www.tensorflow.org/probability)

## Installation
Expand Down
6 changes: 6 additions & 0 deletions deepchem/models/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@
import tensorflow as tf
import sys

from deepchem.models.keras_model import is_wandb_available
if is_wandb_available():
import wandb


class ValidationCallback(object):
"""Performs validation while training a KerasModel.
Expand Down Expand Up @@ -81,6 +85,8 @@ def __call__(self, model, step):
if model.tensorboard:
for key in scores:
model._log_value_to_tensorboard(tag=key, simple_value=scores[key])
if model.wandb:
wandb.log(scores, step=step)
if self.save_dir is not None:
score = scores[self.metrics[self.save_metric].name]
if not self.save_on_minimum:
Expand Down
31 changes: 31 additions & 0 deletions deepchem/models/keras_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,23 @@
from deepchem.trans import undo_transforms
from deepchem.utils.evaluate import GeneratorEvaluator

# Detect once, at import time, whether Weights & Biases logging is usable.
# Two conditions must hold: the `wandb` package is importable AND the user is
# authenticated (an API key is configured). The result is cached in the
# module-level flag `_has_wandb`, exposed via is_wandb_available().
try:
  import wandb
  # NOTE(review): `ensure_configured` and `wandb.api.api_key` are wandb
  # internals, not public API — presumably they populate/read the stored
  # credentials; verify against the pinned wandb version.
  wandb.ensure_configured()
  if wandb.api.api_key is None:
    # Installed but not authenticated: warn once and disable logging rather
    # than failing later mid-training.
    _has_wandb = False
    wandb.termwarn(
        "W&B installed but not logged in. Run `wandb login` or set the WANDB_API_KEY env variable."
    )
  else:
    _has_wandb = True
except (ImportError, AttributeError):
  # ImportError: wandb not installed. AttributeError: installed wandb version
  # lacks the internal attributes probed above. Either way, disable logging.
  _has_wandb = False


def is_wandb_available():
  """Return True when the wandb package is installed and the user is logged in.

  Reflects the module-level probe performed at import time; no I/O happens
  per call.
  """
  return bool(_has_wandb)


class KerasModel(Model):
"""This is a DeepChem model implemented by a Keras model.
Expand Down Expand Up @@ -104,6 +121,7 @@ def __init__(self,
learning_rate=0.001,
optimizer=None,
tensorboard=False,
wandb=False,
log_frequency=100,
**kwargs):
"""Create a new KerasModel.
Expand All @@ -130,6 +148,8 @@ def __init__(self,
ignored.
tensorboard: bool
whether to log progress to TensorBoard during training
wandb: bool
whether to log progress to Weights & Biases during training
log_frequency: int
The frequency at which to log data. Data is logged using
`logging` by default. If `tensorboard` is set, data is also
Expand All @@ -151,6 +171,15 @@ def __init__(self,
else:
self.optimizer = optimizer
self.tensorboard = tensorboard

# W&B logging
if wandb and not is_wandb_available():
logger.warning(
"You set wandb to True but W&B is not installed. To use wandb logging, "
"run `pip install wandb; wandb login` see https://docs.wandb.com/huggingface."
)
self.wandb = wandb and is_wandb_available()

# Backwards compatibility
if "tensorboard_log_frequency" in kwargs:
logger.warning(
Expand Down Expand Up @@ -375,6 +404,8 @@ def fit_generator(self,
if self.tensorboard and should_log:
with self._summary_writer.as_default():
tf.summary.scalar('loss', batch_loss, current_step)
if self.wandb and should_log:
wandb.log({'loss': batch_loss}, step=current_step)

# Report final results.
if averaged_batches > 0:
Expand Down
15 changes: 15 additions & 0 deletions docs/models.rst
Original file line number Diff line number Diff line change
Expand Up @@ -148,8 +148,23 @@ KerasModel
----------
DeepChem extensively uses `Keras`_ to build powerful machine learning models.

Training loss and validation metrics can be automatically logged to `Weights & Biases`_ with the following commands::

# Install wandb in shell
pip install wandb

# Login in shell (required only once)
wandb login

# Start a W&B run in your script (refer to docs for optional parameters)
import wandb
wandb.init(project="my project")

# Set `wandb` arg when creating `KerasModel`
model = KerasModel(…, wandb=True)

.. _`Keras`: https://keras.io/

.. _`Weights & Biases`: https://docs.wandb.com/

.. autoclass:: deepchem.models.KerasModel
:members:
Expand Down

0 comments on commit eeec96f

Please sign in to comment.