This repository has been archived by the owner on Nov 3, 2023. It is now read-only.

Fix a lazy typo that wasn't tested. #3744

Merged
merged 1 commit on Jun 28, 2021
4 changes: 2 additions & 2 deletions parlai/core/torch_generator_agent.py
@@ -580,7 +580,7 @@ def set_interactive_mode(self, mode, shared=False):

     def _cache_dummy_batch(self, batch: Batch):
         """
-        Cache a batch to be used as a dummy during _fake_forward_pass.
+        Cache a batch to be used as a dummy during _fake_forward_backward_pass.
         """
         if not hasattr(self, '_dummy_batch'):
             self._dummy_batch = batch
@@ -750,7 +750,7 @@ def train_step(self, batch):

             # gradients are synced on backward, now this model is going to be
             # out of sync! catch up with the other workers
-            self._fake_forward_pass()
+            self._fake_forward_backward_pass()

     def _construct_token_losses(self, labels, model_output):
         # Get non-aggregated losses
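For context, the renamed helper is part of a pattern for keeping distributed workers in lock-step: gradients are synchronized during backward, so a worker that skips its real update (for example after an out-of-memory error) must still run a forward and backward pass, or the other workers' gradient all-reduce will stall. Below is a minimal, self-contained sketch of that pattern. Only the names `_cache_dummy_batch`, `_fake_forward_backward_pass`, and `train_step` come from the diff; the toy model, optimizer, and out-of-memory handling are illustrative assumptions, not ParlAI's actual implementation.

```python
import torch
import torch.nn as nn


class SketchAgent:
    """Minimal sketch of the dummy-batch / fake-pass pattern (illustrative,
    not ParlAI's actual TorchGeneratorAgent code)."""

    def __init__(self):
        self.model = nn.Linear(4, 2)
        self.optimizer = torch.optim.SGD(self.model.parameters(), lr=0.1)

    def _cache_dummy_batch(self, batch):
        # Keep the first real batch around so it can be replayed later by
        # _fake_forward_backward_pass.
        if not hasattr(self, '_dummy_batch'):
            self._dummy_batch = batch

    def _fake_forward_backward_pass(self):
        # Forward/backward on the cached batch with the loss scaled to zero:
        # parameters are unchanged, but backward() still runs, so under
        # DistributedDataParallel this worker participates in the gradient
        # all-reduce and stays in step with workers that did a real update.
        loss = self.model(self._dummy_batch).sum()
        (0.0 * loss).backward()

    def train_step(self, batch):
        self._cache_dummy_batch(batch)
        try:
            loss = self.model(batch).sum()
            loss.backward()
            self.optimizer.step()
            self.optimizer.zero_grad()
        except RuntimeError as err:
            if 'out of memory' not in str(err):
                raise
            # gradients are synced on backward, now this model is going to be
            # out of sync! catch up with the other workers
            self._fake_forward_backward_pass()


agent = SketchAgent()
agent.train_step(torch.randn(3, 4))
```

The zero-scaled loss is the key design choice in this sketch: the backward call fires DDP's synchronization hooks without changing any parameters, so skipping a batch on one worker does not deadlock or desynchronize the rest.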