Commit

Merge 3798a62 into a3e5e03
Dai-Wenxun committed Feb 19, 2023
2 parents a3e5e03 + 3798a62 commit b7fe801
Showing 3 changed files with 11 additions and 11 deletions.
6 changes: 3 additions & 3 deletions mmengine/runner/runner.py
@@ -1167,7 +1167,7 @@ def build_param_scheduler(
in runner, ``build_param_scheduler`` will return a dict containing
the same keys with multiple optimizers and each value is a list of
parameter schedulers. Note that, if you want different optimizers to
-use different parameter shedulers to update optimizer's
+use different parameter schedulers to update optimizer's
hyper-parameters, the input parameter ``scheduler`` also needs to be
a dict and its key are consistent with multiple optimizers.
Otherwise, the same parameter schedulers will be used to update
@@ -1198,7 +1198,7 @@ def build_param_scheduler(
<mmengine.optim.scheduler.lr_scheduler.StepLR at 0x7f70f6eb6150>]
Above examples only provide the case of one optimizer and one scheduler
-or multiple shedulers. If you want to know how to set parameter
+or multiple schedulers. If you want to know how to set parameter
scheduler when using multiple optimizers, you can find more examples
`optimizer-docs`_.
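
(Aside, not part of the diff: the docstring above notes that with multiple optimizers, ``param_scheduler`` can itself be a dict whose keys match the optimizer names. A minimal sketch of such a config follows; the optimizer names ``generator``/``discriminator`` and the concrete scheduler settings are illustrative assumptions, not taken from this commit.)

# Illustrative sketch only: assumes the optim_wrapper defines two
# optimizers named 'generator' and 'discriminator'. Each key below
# selects the parameter schedulers applied to the optimizer of the
# same name.
param_scheduler = dict(
    generator=[
        dict(type='LinearLR', start_factor=0.001, by_epoch=True, begin=0, end=5),
        dict(type='MultiStepLR', milestones=[20, 40], gamma=0.1),
    ],
    discriminator=[
        dict(type='StepLR', step_size=30, gamma=0.5),
    ],
)
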
@@ -2233,7 +2233,7 @@ def _check_scheduler_cfg(
if is_seq_of(param_scheduler, dict):
for _param_scheduler in param_scheduler:
assert 'type' in _param_scheduler, (
-'Each parameter sheduler should contain the key type, '
+'Each parameter scheduler should contain the key type, '
f'but got {_param_scheduler}')
elif isinstance(param_scheduler, dict):
if 'type' not in param_scheduler:
14 changes: 7 additions & 7 deletions tests/test_registry/test_build_functions.py
@@ -189,7 +189,7 @@ def pseudo_build(cfg):


@pytest.mark.skipif(not is_installed('torch'), reason='tests requires torch')
-def test_build_sheduler_from_cfg():
+def test_build_scheduler_from_cfg():
import torch.nn as nn
from torch.optim import SGD
model = nn.Conv2d(1, 1, 1)
@@ -200,9 +200,9 @@ def test_build_sheduler_from_cfg():
param_name='lr',
begin=0,
end=100)
-sheduler = PARAM_SCHEDULERS.build(cfg)
-assert sheduler.begin == 0
-assert sheduler.end == 100
+scheduler = PARAM_SCHEDULERS.build(cfg)
+assert scheduler.begin == 0
+assert scheduler.end == 100

cfg = dict(
type='LinearParamScheduler',
@@ -213,6 +213,6 @@ def test_build_sheduler_from_cfg():
end=100,
epoch_length=10)

-sheduler = PARAM_SCHEDULERS.build(cfg)
-assert sheduler.begin == 0
-assert sheduler.end == 1000
+scheduler = PARAM_SCHEDULERS.build(cfg)
+assert scheduler.begin == 0
+assert scheduler.end == 1000
2 changes: 1 addition & 1 deletion tests/test_runner/test_runner.py
@@ -2303,7 +2303,7 @@ def test_checkpoint(self):
MultiStepLR)
self.assertIsInstance(runner.param_schedulers['linear2'][0], StepLR)

-# 2.7.3 test `resume` 2 optimizers and 0 sheduler list.
+# 2.7.3 test `resume` 2 optimizers and 0 scheduler list.
cfg = copy.deepcopy(self.epoch_based_cfg)
cfg.experiment_name = 'test_checkpoint18'
cfg.optim_wrapper = optim_cfg
