
Commit

fix mindspore dynamic lr
hanjr92 committed Feb 13, 2023
1 parent 7d58fb9 commit 6180c3b
Showing 3 changed files with 6 additions and 1 deletion.
4 changes: 4 additions & 0 deletions docs/modules/nn.rst
@@ -377,6 +377,10 @@ Batch Normalization 3D
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 .. autoclass:: BatchNorm3d
 
+Layer Normalization
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. autoclass:: LayerNorm
+
 .. -----------------------------------------------------------
 .. Padding Layers
 .. -----------------------------------------------------------
3 changes: 2 additions & 1 deletion tensorlayerx/optimizers/lr/mindspore_lr.py
@@ -25,10 +25,11 @@ def __init__(self, learning_rate=0.1, last_epoch=-1, verbose=False):
         self.base_lr = learning_rate
         self.last_lr = learning_rate
         self.verbose = verbose
+        self.cast = P.Cast()
 
     def construct(self, global_step):
 
-        return self.last_lr
+        return self.cast(self.last_lr, mstype.float32)
 
     def step(self, epoch=None):
         if epoch is None:
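The patch makes construct return a float32 MindSpore tensor instead of the raw Python float stored in self.last_lr, which is what MindSpore expects when the learning rate is supplied as a dynamic-lr Cell. Below is a minimal standalone sketch of the same cast pattern; the class name ConstantLR and the usage at the bottom are illustrative assumptions, not the scheduler class in tensorlayerx/optimizers/lr/mindspore_lr.py. The P and mstype aliases mirror the ones used in the diff above.

# Standalone sketch of the fix: cast the stored learning rate to a float32
# tensor inside construct. ConstantLR is an illustrative stand-in, not the
# real TensorLayerX scheduler.
from mindspore import nn
from mindspore.ops import operations as P
from mindspore.common import dtype as mstype


class ConstantLR(nn.Cell):

    def __init__(self, learning_rate=0.1):
        super().__init__()
        self.last_lr = learning_rate
        self.cast = P.Cast()  # converts a Python number or tensor to a given dtype

    def construct(self, global_step):
        # Returning the raw Python float can fail where MindSpore expects a
        # float32 tensor for the learning rate, hence the explicit cast.
        return self.cast(self.last_lr, mstype.float32)


lr = ConstantLR(0.01)
print(lr(1))  # e.g. a scalar Tensor with dtype Float32 and value 0.01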
