Skip to content
This repository has been archived by the owner on Apr 27, 2023. It is now read-only.

Commit

Permalink
Fix NaN loss by reducing the learning rate
Browse files Browse the repository at this point in the history
  • Loading branch information
Chi Chen committed Apr 29, 2019
1 parent b9fe36e commit 34f64a9
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion megnet/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from megnet.losses import mse_scale
from keras.regularizers import l2
from keras.models import Model
from megnet.callbacks import ModelCheckpointMAE, ManualStop
from megnet.callbacks import ModelCheckpointMAE, ManualStop, ReduceLRUponNan
from megnet.data.graph import GraphBatchDistanceConvert, GraphBatchGenerator, GaussianDistance
from megnet.data.crystal import CrystalGraph
import numpy as np
Expand Down Expand Up @@ -106,6 +106,7 @@ def train_from_graphs(self,
if callbacks is None:
# with this call back you can stop the model training by `touch STOP`
callbacks = [ManualStop()]
callbacks.append(ReduceLRUponNan())
train_targets = np.array(train_targets).ravel()
if validation_graphs is not None:
filepath = pjoin(dirname, 'val_mae_{epoch:05d}_{%s:.6f}.hdf5' % monitor)
Expand Down

0 comments on commit 34f64a9

Please sign in to comment.