Commit 1d374a2: fix

zheyuye committed Jul 9, 2020 · 1 parent: e4fba39
Showing 2 changed files with 4 additions and 5 deletions.
4 changes: 2 additions & 2 deletions scripts/question_answering/run_squad.py
```diff
@@ -374,8 +374,8 @@ def untune_params(model, untunable_depth, not_included=[]):
         A list of parameter names that are not included in the untunable parameters
     """
     all_layers = model.backbone.encoder.all_encoder_layers
-    for _, v in model.collect_params('.*embed*').items():
-        model.grad_req = 'null'
+    for _, value in model.collect_params('.*embed*').items():
+        value.grad_req = 'null'

     for layer in all_layers[:untunable_depth]:
         for key, value in layer.collect_params().items():
```
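This hunk fixes a silent no-op: the old loop iterated over the embedding parameters but assigned `grad_req` on the model object itself, which only creates an unused attribute and freezes nothing. Setting `grad_req = 'null'` on each matched `Parameter` is what actually disables gradient computation. A minimal sketch of the corrected idiom on a toy Gluon block (`ToyModel` and its layer sizes are illustrative, not from the repository):

```python
import mxnet as mx
from mxnet.gluon import nn

class ToyModel(nn.Block):
    """Stand-in for the SQuAD model: an embedding followed by a dense head."""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.embedding = nn.Embedding(input_dim=100, output_dim=16)
        self.dense = nn.Dense(8)

    def forward(self, x):
        return self.dense(self.embedding(x))

net = ToyModel()
net.initialize()

# collect_params accepts a regex, so '.*embed*' selects only the parameters
# whose name contains the embedding prefix (here, the embedding weight).
for _, value in net.collect_params('.*embed*').items():
    value.grad_req = 'null'  # per-Parameter flag: no gradient, i.e. frozen

# The buggy version did `model.grad_req = 'null'` inside this loop, which
# sets an attribute the model never reads and leaves every parameter trainable.
```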
5 changes: 2 additions & 3 deletions src/gluonnlp/models/electra.py
```diff
@@ -392,9 +392,8 @@ def apply_layerwise_decay(self, layerwise_decay, not_included=[]):
         # consider the task-specific fine-tuning layer as the last layer, followed by the pooler
         # In addition, the embedding parameters have a smaller learning rate based on this setting.
         max_depth = self.num_layers
-        for key, value in self.collect_params().items():
-            if 'embed' in key:
-                value.lr_mult = layerwise_decay**(max_depth + 1)
+        for _, value in self.collect_params('.*embed*').items():
+            value.lr_mult = layerwise_decay**(max_depth + 1)

         for (layer_depth, layer) in enumerate(self.encoder.all_encoder_layers):
             layer_params = layer.collect_params()
```
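The electra.py change swaps a manual `'embed' in key` substring check for the same regex-based `collect_params` selection, and keeps the rule the comment describes: `lr_mult` multiplies a parameter's effective learning rate, and the embeddings get `layerwise_decay**(max_depth + 1)`, the largest exponent and therefore the smallest multiplier. A hedged sketch of the resulting schedule follows; the per-layer exponent `max_depth - layer_depth` is inferred from the surrounding loop and the usual layer-wise decay convention, since the diff shows only the embedding case:

```python
# Illustrative numbers, not values taken from the GluonNLP source.
layerwise_decay = 0.8
num_layers = 12
max_depth = num_layers  # the task-specific head counts as the last layer

# Effective lr of a parameter is base_lr * lr_mult. Lower layers get a
# larger exponent, hence a smaller multiplier.
for layer_depth in range(num_layers):
    lr_mult = layerwise_decay ** (max_depth - layer_depth)
    print(f'encoder layer {layer_depth:2d}: lr_mult = {lr_mult:.4f}')

# Embeddings sit below the first encoder layer and receive the smallest
# multiplier of all, decay**(max_depth + 1).
print(f'embeddings:       lr_mult = {layerwise_decay ** (max_depth + 1):.4f}')
```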
