Commit d536233
removing timing prints
bengioe committed May 9, 2024
1 parent f859640 commit d536233
Showing 1 changed file with 0 additions and 6 deletions.
6 changes: 0 additions & 6 deletions src/gflownet/trainer.py
@@ -279,8 +279,6 @@ def run(self, logger=None):
         num_training_steps = self.cfg.num_training_steps
         logger.info("Starting training")
         start_time = time.time()
-        t0 = time.time()
-        times = []
         for it, batch in zip(range(start, 1 + num_training_steps), cycle(train_dl)):
             # the memory fragmentation or allocation keeps growing, how often should we clean up?
             # is changing the allocation strategy helpful?
@@ -289,10 +287,6 @@ def run(self, logger=None):
                 gc.collect()
                 torch.cuda.empty_cache()
             batch = self._maybe_resolve_shared_buffer(batch, train_dl)
-            t1 = time.time()
-            times.append(t1 - t0)
-            print(f"iteration {it} : {t1 - t0:.2f} s, average: {np.mean(times):.2f} s")
-            t0 = t1
             epoch_idx = it // epoch_length
             batch_idx = it % epoch_length
             if self.replay_buffer is not None and len(self.replay_buffer) < self.replay_buffer.warmup:
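
The deleted lines measured wall-clock time per training iteration and printed a running average to stdout. If that signal were still wanted without the print noise, one option is to route it through the trainer's existing logger at a fixed interval. The sketch below is hypothetical and not part of this commit: `timed_iterations` and its `log_every` parameter are assumed names, and only `logger` mirrors the diff.

import time

import numpy as np

# Hypothetical sketch: wrap an iterable of batches and log per-iteration
# wall-clock time through a logger instead of print(). `log_every` is an
# assumed parameter, not part of the original trainer.
def timed_iterations(iterable, logger, log_every=100):
    times = []
    t0 = time.time()
    for it, batch in enumerate(iterable):
        yield it, batch
        t1 = time.time()
        times.append(t1 - t0)
        t0 = t1
        if it % log_every == 0:
            logger.info(f"iteration {it}: {times[-1]:.2f} s, average: {np.mean(times):.2f} s")

Usage would look like `for it, batch in timed_iterations(cycle(train_dl), logger): ...`, which keeps the timing bookkeeping out of the training loop body entirely.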
