From b05c4ef4e69d64290d03780058236c271c1beda3 Mon Sep 17 00:00:00 2001
From: Irene Dea
Date: Thu, 24 Oct 2024 10:49:28 -0700
Subject: [PATCH] Use fun default composer run names (#1611)

Co-authored-by: Mihir Patel
---
 llmfoundry/command_utils/train.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/llmfoundry/command_utils/train.py b/llmfoundry/command_utils/train.py
index cb287b029c..2a5e95c6a1 100644
--- a/llmfoundry/command_utils/train.py
+++ b/llmfoundry/command_utils/train.py
@@ -311,10 +311,11 @@ def train(cfg: DictConfig) -> Trainer:
     eval_gauntlet_config = train_cfg.eval_gauntlet or train_cfg.eval_gauntlet_str

     # Optional parameters will be set to default values if not specified.
-    env_run_name: Optional[str] = os.environ.get('RUN_NAME', None)
-    run_name: str = (
-        train_cfg.run_name if train_cfg.run_name else env_run_name
-    ) or 'llm'
+    run_name: Optional[
+        str] = train_cfg.run_name if train_cfg.run_name else os.environ.get(
+            'RUN_NAME',
+            None,
+        )
     is_state_dict_sharded: bool = (
         fsdp_config.get('state_dict_type', 'full') == 'sharded'
     ) if fsdp_config else False
@@ -322,9 +323,8 @@
     save_filename: str = train_cfg.save_filename if train_cfg.save_filename else 'ep{epoch}-ba{batch}-rank{rank}.pt'

     # Enable autoresume from model checkpoints if possible
-    is_user_set_run_name: bool = train_cfg.run_name is not None or env_run_name is not None
     autoresume_default: bool = False
-    if is_user_set_run_name and \
+    if run_name is not None and \
         train_cfg.save_folder is not None \
         and not train_cfg.save_overwrite \
         and not train_cfg.save_weights_only:
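
Note (illustrative sketch, not part of the patch): the diff makes two related changes. First, run_name may now resolve to None instead of falling back to 'llm', so the Composer Trainer can pick its own default ("fun") run name. Second, the autoresume default is gated directly on run_name is not None rather than on a separate is_user_set_run_name flag. The standalone Python sketch below mirrors that resolution order under stated assumptions; the helper names resolve_run_name and autoresume_default_for are hypothetical and do not exist in llm-foundry.

# Illustrative sketch only. It assumes cfg_run_name plays the role of
# train_cfg.run_name and that the other arguments mirror the TrainConfig
# fields referenced in the patched condition.
import os
from typing import Optional


def resolve_run_name(cfg_run_name: Optional[str]) -> Optional[str]:
    """Return the configured run name, else the RUN_NAME env var, else None.

    Previously the result was additionally forced to 'llm' when both were
    unset; now None is passed through so the trainer can choose a default.
    """
    return cfg_run_name if cfg_run_name else os.environ.get('RUN_NAME', None)


def autoresume_default_for(
    run_name: Optional[str],
    save_folder: Optional[str],
    save_overwrite: bool,
    save_weights_only: bool,
) -> bool:
    """Mirror the patched gating: default autoresume on only when the run
    name is user-provided (stable across restarts) and full, non-overwriting
    checkpoints are being saved."""
    return (
        run_name is not None and save_folder is not None and
        not save_overwrite and not save_weights_only
    )


if __name__ == '__main__':
    # With no config value and no RUN_NAME set, the name is now None
    # (previously 'llm'), so autoresume stays off by default.
    name = resolve_run_name(None)
    print(name, autoresume_default_for(name, '/tmp/ckpt', False, False))

Usage note: because an unset run name no longer collapses to the shared 'llm' default, it can no longer be mistaken for a user-chosen, stable name, which is why the autoresume check above keys on run_name is not None.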