Skip to content

Commit

Permalink
Merge pull request #1388 from BenCrulis/retain_graph
Browse files — browse the repository at this point in the history
Add a variable to control whether to retain the graph or not in the backward pass
  • Loading branch information
AntonioCarta committed May 29, 2023
2 parents 5be267a + 969e23f commit 647c154
Showing 1 changed file with 4 additions and 1 deletion.
5 changes: 4 additions & 1 deletion — avalanche/training/templates/base_sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,9 @@ def __init__(
)
""" Eval mini-batch size. """

self.retain_graph: bool = False
""" Retain graph when calling loss.backward(). """

if evaluator is None:
evaluator = EvaluationPlugin()
elif callable(evaluator):
Expand Down Expand Up @@ -220,7 +223,7 @@ def training_epoch(self, **kwargs):

def backward(self):
"""Run the backward pass."""
self.loss.backward()
self.loss.backward(retain_graph=self.retain_graph)

def optimizer_step(self):
"""Execute the optimizer step (weights update)."""
Expand Down

0 comments on commit 647c154

Please sign in to comment.