Moving last bit over to finetuning file
lucapericlp committed Mar 13, 2024
1 parent a5bf07d commit c376ead
Showing 2 changed files with 18 additions and 19 deletions.
19 changes: 0 additions & 19 deletions fam/llm/config/finetune_params.py
@@ -67,25 +67,6 @@
ddp_world_size = 1
tokens_per_iter = gradient_accumulation_steps * ddp_world_size * batch_size * block_size

print(f"tokens per iteration will be: {tokens_per_iter:,}")

ckpts_base_dir = pathlib.Path(__file__).resolve().parent / "ckpts"
if not os.path.exists(ckpts_base_dir) and master_process:
    raise Exception(f"ckpts dir {ckpts_base_dir} does not exist!")

if master_process:
    if "/" in out_dir:
        raise Exception("out_dir should be just a name, not a path with slashes")

ckpts_save_dir = ckpts_base_dir / out_dir
os.makedirs(ckpts_save_dir, exist_ok=True)

causal = True
bias: bool = False # do we use bias inside LayerNorm and Linear layers?
spk_emb_on_text: bool = True # whether to add speaker embedding conditioning to text tokens or not


def get_globals_state():
    """Return entirety of configuration global state which can be used for logging."""
    config_keys = [k for k, v in globals().items() if not k.startswith("_") and isinstance(v, (int, float, bool, str))]
    return {k: globals()[k] for k in config_keys}  # will be useful for logging
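
For reference, get_globals_state snapshots every module-level scalar (int, float, bool, str) so the whole training configuration can be logged in one call. A minimal standalone sketch of the same pattern, with config values made up for illustration rather than taken from the repo:

# Illustrative sketch of the globals-snapshot pattern; the values below are assumptions.
batch_size = 8
learning_rate = 3e-4
out_dir = "my_finetune_run"

def get_globals_state():
    """Return all scalar globals as a dict, e.g. for experiment logging."""
    config_keys = [k for k, v in globals().items() if not k.startswith("_") and isinstance(v, (int, float, bool, str))]
    return {k: globals()[k] for k in config_keys}

print(get_globals_state())  # {'batch_size': 8, 'learning_rate': 0.0003, 'out_dir': 'my_finetune_run'}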
18 changes: 18 additions & 0 deletions fam/llm/finetune.py
@@ -36,6 +36,24 @@
]
ctx = nullcontext() if device_type == "cpu" else torch.amp.autocast(device_type=device_type, dtype=ptdtype)

print(f"tokens per iteration will be: {tokens_per_iter:,}")

ckpts_base_dir = pathlib.Path(__file__).resolve().parent / "ckpts"
if not os.path.exists(ckpts_base_dir) and master_process:
    raise Exception(f"ckpts dir {ckpts_base_dir} does not exist!")

if master_process:
    if "/" in out_dir:
        raise Exception("out_dir should be just a name, not a path with slashes")

ckpts_save_dir = ckpts_base_dir / out_dir
os.makedirs(ckpts_save_dir, exist_ok=True)

def get_globals_state():
    """Return entirety of configuration global state which can be used for logging."""
    config_keys = [k for k, v in globals().items() if not k.startswith("_") and isinstance(v, (int, float, bool, str))]
    return {k: globals()[k] for k in config_keys}  # will be useful for logging

model_args: dict = dict(
    n_layer=n_layer,
    n_head=n_head,
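
As a quick sanity check on the tokens-per-iteration print that now lives in finetune.py, here is the arithmetic with assumed values (these numbers are illustrative, not the project's defaults):

# Worked example of the tokens_per_iter formula with made-up values.
gradient_accumulation_steps = 4
ddp_world_size = 1
batch_size = 8
block_size = 2048
tokens_per_iter = gradient_accumulation_steps * ddp_world_size * batch_size * block_size
print(f"tokens per iteration will be: {tokens_per_iter:,}")  # tokens per iteration will be: 65,536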
