style: Log the forced multi-GPU hyperparameters
saattrupdan committed Nov 22, 2023
1 parent 52733ea · commit 6d1b7f4
Showing 1 changed file with 14 additions and 6 deletions.
20 changes: 14 additions & 6 deletions src/scripts/finetune_model.py
@@ -7,10 +7,14 @@
 import hydra
 from omegaconf import DictConfig
 import os
+import logging
 
 from coral_models.finetune import finetune
 
 
+logger = logging.getLogger(__name__)
+
+
 @hydra.main(config_path="../../config", config_name="config", version_base=None)
 def main(cfg: DictConfig) -> None:
     """Finetune an ASR model.
@@ -22,14 +22,18 @@ def main(cfg: DictConfig) -> None:
     # In case we are running in a multi-GPU setting, we need to force certain
     # hyperparameters
     if os.getenv("WORLD_SIZE") is not None:
-        if "layerdrop" in cfg.model:
+        if "layerdrop" in cfg.model and cfg.model.layerdrop != 0.0:
+            logger.info(
+                "Forcing layerdrop to 0.0 as this is required in a multi-GPU training"
+            )
             cfg.model.layerdrop = 0.0
-        cfg.padding = "max_length"
 
-    import logging
+        if cfg.padding != "max_length":
+            logger.info(
+                "Forcing padding to 'max_length' as this is required in a multi-GPU "
+                "training"
+            )
+            cfg.padding = "max_length"
 
-    logger = logging.getLogger(__name__)
     logger.info(cfg)
     finetune(cfg)
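
For context, the guard this commit touches follows a simple "force and log" pattern: detect a distributed run via the WORLD_SIZE environment variable (set by launchers such as torchrun), override any hyperparameter that multi-GPU training cannot tolerate, and log each override so the change is visible in the run logs. Below is a minimal, self-contained sketch of that pattern; it uses a plain dict in place of Hydra's DictConfig, and the helper name force_multi_gpu_hyperparameters is illustrative, not part of the repository.

import logging
import os

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def force_multi_gpu_hyperparameters(cfg: dict) -> None:
    """Force hyperparameters required for multi-GPU training, logging each override."""
    # Distributed launchers such as torchrun set WORLD_SIZE; if it is absent,
    # this is a single-process run and nothing needs to be forced.
    if os.getenv("WORLD_SIZE") is None:
        return

    # Only override (and log) when the configured value actually differs,
    # mirroring the `!= 0.0` and `!= "max_length"` guards added in the diff.
    if cfg.get("layerdrop", 0.0) != 0.0:
        logger.info(
            "Forcing layerdrop to 0.0 as this is required in a multi-GPU training"
        )
        cfg["layerdrop"] = 0.0
    if cfg.get("padding") != "max_length":
        logger.info(
            "Forcing padding to 'max_length' as this is required in a multi-GPU "
            "training"
        )
        cfg["padding"] = "max_length"


if __name__ == "__main__":
    # Simulate a two-process launch and watch both overrides get logged.
    os.environ["WORLD_SIZE"] = "2"
    cfg = {"layerdrop": 0.1, "padding": "longest"}
    force_multi_gpu_hyperparameters(cfg)
    logger.info(cfg)

The overrides themselves are the usual ones for data-parallel training: a nonzero layerdrop randomly skips layers per forward pass, which leaves some parameters without gradients and can desynchronise DDP's gradient reduction, and anything other than fixed max_length padding produces differently shaped batches on different workers.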

