Commit

update
LZHgrla committed Feb 25, 2024
1 parent 43ffda5 commit 3661fbd
Showing 2 changed files with 10 additions and 18 deletions.
4 changes: 4 additions & 0 deletions mmengine/optim/optimizer/default_constructor.py
@@ -213,6 +213,10 @@ def add_params(self,
                     level=logging.WARNING)
                 continue
             if not param.requires_grad:
+                print_log((f'{prefix}.{name} is skipped since its '
+                           f'requires_grad={param.requires_grad}'),
+                          logger='current',
+                          level=logging.WARNING)
                 continue
 
             # if the parameter match one of the custom keys, ignore other rules
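
A minimal sketch of how the new skip behaviour surfaces to a user (editor's illustration, not part of the commit; it assumes mmengine exposes DefaultOptimWrapperConstructor from mmengine.optim with torch optimizers registered, and uses a toy two-layer model rather than the repository's test model):

# Sketch only: a non-empty paramwise_cfg is passed so construction goes through add_params.
import torch.nn as nn
from mmengine.optim import DefaultOptimWrapperConstructor

model = nn.Sequential(nn.Linear(4, 4), nn.Linear(4, 2))
for p in model[0].parameters():      # freeze the first layer
    p.requires_grad = False

optim_wrapper_cfg = dict(
    type='OptimWrapper',
    optimizer=dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=1e-4))
paramwise_cfg = dict(bias_lr_mult=2.0)  # any non-empty cfg; bias_lr_mult is just an example

optim_constructor = DefaultOptimWrapperConstructor(optim_wrapper_cfg, paramwise_cfg)
optim_wrapper = optim_constructor(model)
# With this commit, the frozen parameters of model[0] are skipped and a WARNING is
# logged for each, so only model[1].weight and model[1].bias receive param groups.
print(len(optim_wrapper.optimizer.param_groups))  # expected: 2
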
24 changes: 6 additions & 18 deletions tests/test_optim/test_optimizer/test_optimizer.py
@@ -549,7 +549,8 @@ def test_default_optimizer_constructor_with_empty_paramwise_cfg(self):
                 weight_decay=self.base_wd,
                 momentum=self.momentum))
         paramwise_cfg = dict()
-        optim_constructor = DefaultOptimWrapperConstructor(optim_wrapper_cfg)
+        optim_constructor = DefaultOptimWrapperConstructor(
+            optim_wrapper_cfg, paramwise_cfg)
         optim_wrapper = optim_constructor(model)
         self._check_default_optimizer(optim_wrapper.optimizer, model)

@@ -595,19 +596,8 @@ def test_default_optimizer_constructor_no_grad(self):
             param.requires_grad = False
         optim_constructor = DefaultOptimWrapperConstructor(
             optim_wrapper_cfg, paramwise_cfg)
-        optim_wrapper = optim_constructor(self.model)
-        optimizer = optim_wrapper.optimizer
-        param_groups = optimizer.param_groups
-        assert isinstance(optim_wrapper.optimizer, torch.optim.SGD)
-        assert optimizer.defaults['lr'] == self.base_lr
-        assert optimizer.defaults['momentum'] == self.momentum
-        assert optimizer.defaults['weight_decay'] == self.base_wd
-        for i, (name, param) in enumerate(self.model.named_parameters()):
-            param_group = param_groups[i]
-            assert torch.equal(param_group['params'][0], param)
-            assert param_group['momentum'] == self.momentum
-            assert param_group['lr'] == self.base_lr
-            assert param_group['weight_decay'] == self.base_wd
+        with self.assertRaises(ValueError):
+            optim_constructor(self.model)
 
     def test_default_optimizer_constructor_bypass_duplicate(self):
         # paramwise_cfg with bypass_duplicate option
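
The new expectation follows from the constructor change above: with every parameter frozen, add_params skips them all, so the underlying torch optimizer is built with an empty parameter list, which PyTorch rejects. A one-line illustration of that failure mode (editor's sketch, not from the commit):

import torch
# torch optimizers refuse an empty parameter list, hence the expected ValueError.
torch.optim.SGD([], lr=0.01)  # raises ValueError: optimizer got an empty parameter list
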
@@ -663,10 +653,8 @@ def test_default_optimizer_constructor_bypass_duplicate(self):
             optim_wrapper = optim_constructor(model)
         model_parameters = list(model.parameters())
         num_params = 14 if MMCV_FULL_AVAILABLE else 11
-        assert len(optim_wrapper.optimizer.param_groups) == len(
-            model_parameters) == num_params
-        self._check_sgd_optimizer(optim_wrapper.optimizer, model,
-                                  **paramwise_cfg)
+        assert len(optim_wrapper.optimizer.param_groups
+                   ) == len(model_parameters) - 1 == num_params - 1
 
     def test_default_optimizer_constructor_custom_key(self):
         # test DefaultOptimWrapperConstructor with custom_keys and
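
The reduced count here presumably reflects the same skip: one of the test model's parameters appears to have requires_grad=False and no longer receives its own param group, leaving one group fewer than the parameter count. A compact sketch mirroring the new assertion on a toy model (editor's assumption; same import assumptions as the sketch after the first file's hunk):

import torch.nn as nn
from mmengine.optim import DefaultOptimWrapperConstructor

toy = nn.Sequential(nn.Linear(3, 3), nn.Linear(3, 1))
toy[0].bias.requires_grad = False   # exactly one frozen parameter

constructor = DefaultOptimWrapperConstructor(
    dict(type='OptimWrapper', optimizer=dict(type='SGD', lr=0.01)),
    dict(bias_lr_mult=2.0))
wrapper = constructor(toy)
# The frozen bias is skipped, so one param group fewer than the parameter count.
assert len(wrapper.optimizer.param_groups) == len(list(toy.parameters())) - 1
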
