Skip to content

Commit

Permalink
Fixed F841 lint error (local variable assigned but never used)
Browse files Browse the repository at this point in the history
  • Loading branch information
Demirrr committed Nov 30, 2024
1 parent e8dddb6 commit 1dc3000
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions dicee/trainer/model_parallelism.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,9 +167,9 @@ def fit(self, *args, **kwargs):
timeout=0,
worker_init_fn=None,
persistent_workers=False)
if batch_rt is not None:
expected_training_time=batch_rt * len(train_dataloader) * self.attributes.num_epochs
# print(f"Exp.Training Runtime: {expected_training_time/60 :.3f} in mins\t|\tBatch Size:{batch_size}\t|\tBatch RT:{batch_rt:.3f}\t|\t # of batches:{len(train_dataloader)}\t|\t# of epochs:{self.attributes.num_epochs}")
#if batch_rt is not None:
# expected_training_time=batch_rt * len(train_dataloader) * self.attributes.num_epochs
# print(f"Exp.Training Runtime: {expected_training_time/60 :.3f} in mins\t|\tBatch Size:{batch_size}\t|\tBatch RT:{batch_rt:.3f}\t|\t # of batches:{len(train_dataloader)}\t|\t# of epochs:{self.attributes.num_epochs}")

# () Number of batches to reach a single epoch.
num_of_batches = len(train_dataloader)
Expand Down

0 comments on commit 1dc3000

Please sign in to comment.