From 169905935e52d17cac58b4c1671dbf6db2119e52 Mon Sep 17 00:00:00 2001
From: Valentin Pratz
Date: Mon, 21 Nov 2022 18:38:13 +0100
Subject: [PATCH] Temporary fix: add lr_adjuster attribute to trainer to fix
 errors in saving. Depending on how this will be adapted in the future,
 removing all references to lr_adjuster might be the better option

---
 bayesflow/trainers.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/bayesflow/trainers.py b/bayesflow/trainers.py
index f9839ab24..7b71b1990 100644
--- a/bayesflow/trainers.py
+++ b/bayesflow/trainers.py
@@ -152,6 +152,8 @@ def __init__(self, amortizer, generative_model=None, configurator=None, checkpoi
         self.replay_buffer = None
         self.optimizer = None
         self.default_lr = default_lr
+        # Currently unused attribute
+        self.lr_adjuster = None
 
         # Checkpoint and helper classes settings
         self.max_to_keep = max_to_keep
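
The idea behind the workaround can be illustrated with a minimal, hypothetical Python sketch: if the trainer's save path reads self.lr_adjuster unconditionally, the attribute must at least exist as a placeholder, otherwise saving raises AttributeError. The MiniTrainer class and its save method below are invented for illustration only and do not mirror BayesFlow's actual checkpointing code in bayesflow/trainers.py.

    class MiniTrainer:
        def __init__(self, define_placeholder=True):
            self.optimizer = None
            self.default_lr = 0.0005
            if define_placeholder:
                # Placeholder mirroring the patch: the attribute exists but is unused.
                self.lr_adjuster = None

        def save(self):
            # A save routine that unconditionally reads the attribute fails with
            # AttributeError when the placeholder was never assigned in __init__.
            return {"default_lr": self.default_lr, "lr_adjuster": self.lr_adjuster}


    print(MiniTrainer(define_placeholder=True).save())   # works: lr_adjuster saved as None
    try:
        MiniTrainer(define_placeholder=False).save()      # raises AttributeError
    except AttributeError as err:
        print(f"Saving failed: {err}")

Assigning self.lr_adjuster = None in __init__, as the patch does, is the smaller change; the alternative mentioned in the commit message would be to remove every reference to lr_adjuster from the saving code instead.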