clean up optimizer from unused functions
thomwolf committed Nov 9, 2018
1 parent 34a1a01 commit 9e95cd8
Showing 1 changed file with 0 additions and 21 deletions.
optimization.py (0 additions, 21 deletions)
@@ -90,27 +90,6 @@ def get_lr(self):
                 lr.append(lr_scheduled)
         return lr
 
-    def to(self, device):
-        """ Move the optimizer state to a specified device"""
-        for state in self.state.values():
-            state['exp_avg'].to(device)
-            state['exp_avg_sq'].to(device)
-
-    def initialize_step(self, initial_step):
-        """Initialize state with a defined step (but we don't have stored averaged).
-        Arguments:
-            initial_step (int): Initial step number.
-        """
-        for group in self.param_groups:
-            for p in group['params']:
-                state = self.state[p]
-                # State initialization
-                state['step'] = initial_step
-                # Exponential moving average of gradient values
-                state['exp_avg'] = torch.zeros_like(p.data)
-                # Exponential moving average of squared gradient values
-                state['exp_avg_sq'] = torch.zeros_like(p.data)
-
     def step(self, closure=None):
         """Performs a single optimization step.
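Note for anyone who relied on the removed helpers: the deleted to(self, device) was effectively a no-op, because torch.Tensor.to() returns a new tensor rather than moving the original in place, and the returned tensors were discarded. A minimal sketch of a working equivalent is below; the standalone helper name move_optimizer_state is hypothetical and not part of this repository.

import torch

def move_optimizer_state(optimizer, device):
    """Sketch: move the exp_avg / exp_avg_sq buffers of an Adam-style
    optimizer to `device`. Unlike the removed method, it reassigns the
    result of .to(), since Tensor.to() is not an in-place operation."""
    for state in optimizer.state.values():
        state['exp_avg'] = state['exp_avg'].to(device)
        state['exp_avg_sq'] = state['exp_avg_sq'].to(device)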
