Skip to content

Commit

Permalink
correct treatment of second optim
Browse files Browse the repository at this point in the history
  • Loading branch information
Jemoka committed Jan 30, 2024
1 parent c2390fb commit bce2c9d
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions stanza/models/depparse/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,14 @@ def __init__(self, args=None, vocab=None, pretrain=None, model_file=None,
wandb.watch(self.model, log_freq=4, log="all", log_graph=True)

def __init_optim(self):
    """Initialize self.optimizer for the current training stage.

    Two-stage training: the second (fine-tuning) optimizer is used only
    when BOTH conditions hold — this run is the second stage
    (``args["second_stage"]`` is truthy) AND a second optimizer was
    configured (``args["second_optim"]`` is truthy).  In every other
    case the first-stage optimizer is built.  The original condition
    ``not a and b`` grouped incorrectly; it must be ``not (a and b)``
    so that a missing ``second_optim`` or a first-stage run both fall
    through to the primary optimizer.

    Side effects:
        Sets ``self.optimizer`` via ``utils.get_optimizer``.
    """
    use_second = (self.args.get("second_stage", False)
                  and self.args.get('second_optim'))
    if not use_second:
        # First-stage (or only-stage) optimizer.
        self.optimizer = utils.get_optimizer(self.args['optim'], self.model,
                                             self.args['lr'], betas=(0.9, self.args['beta2']),
                                             eps=1e-6,
                                             bert_learning_rate=self.args.get('bert_learning_rate', 0.0))
    else:
        # Second-stage optimizer: separate algorithm / learning rates
        # (including a distinct bert learning rate) for fine-tuning.
        self.optimizer = utils.get_optimizer(self.args['second_optim'], self.model,
                                             self.args['second_lr'], betas=(0.9, self.args['beta2']),
                                             eps=1e-6,
                                             bert_learning_rate=self.args.get('second_bert_learning_rate', 0.0))


def update(self, batch, eval=False):
Expand Down

0 comments on commit bce2c9d

Please sign in to comment.