From 38f0d30e2b55043112cc52338708dfce344fcbe8 Mon Sep 17 00:00:00 2001
From: fealho
Date: Fri, 5 Mar 2021 11:12:46 -0800
Subject: [PATCH] Remove `self.trained_epochs` (#134)

* Expose hyperparameters/change cuda logic
* Fix set_device/update documentation
* Remove self from discriminator
* Fix optimizers
* Remove self from discriminator
* Remove "_" from variables
* Remove self.trained_epochs variable

Co-authored-by: Carles Sala
---
 ctgan/synthesizers/ctgan.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ctgan/synthesizers/ctgan.py b/ctgan/synthesizers/ctgan.py
index 0ce4172f..e2b85612 100644
--- a/ctgan/synthesizers/ctgan.py
+++ b/ctgan/synthesizers/ctgan.py
@@ -152,7 +152,6 @@ def __init__(self, embedding_dim=128, generator_dim=(256, 256), discriminator_di
         self._log_frequency = log_frequency
         self._verbose = verbose
         self._epochs = epochs
-        self.trained_epochs = 0
         self.pac = pac
 
         if not cuda or not torch.cuda.is_available():
@@ -330,7 +329,6 @@ def fit(self, train_data, discrete_columns=tuple(), epochs=None):
 
         steps_per_epoch = max(len(train_data) // self._batch_size, 1)
         for i in range(epochs):
-            self.trained_epochs += 1
             for id_ in range(steps_per_epoch):
                 for n in range(self._discriminator_steps):
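
Note (not part of the patch): the sketch below assumes downstream code that previously read `model.trained_epochs`. With the attribute removed, the number of trained epochs is simply whatever the caller configured, so it can be tracked at the call site. The class name `CTGANSynthesizer`, the `epochs` keyword, and the toy dataset are illustrative assumptions based on the ctgan release this patch targets, not part of the change itself.

import pandas as pd

from ctgan import CTGANSynthesizer  # class name assumed for the targeted ctgan release

# Small illustrative dataset with one continuous and one discrete column.
train_data = pd.DataFrame({
    'amount': [float(i) for i in range(100)],
    'category': ['a', 'b'] * 50,
})

epochs = 10
model = CTGANSynthesizer(epochs=epochs)
model.fit(train_data, discrete_columns=['category'])

# fit() runs exactly `epochs` passes over the data, so the caller already
# knows how many epochs were trained; no instance attribute is needed.
trained_epochs = epochs
print(f'Trained for {trained_epochs} epochs.')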