
Commit

feat: Updated dspy/teleprompt/finetune.py
sweep-ai[bot] committed Dec 20, 2023
1 parent 09314f6 commit 215893d
Showing 1 changed file with 3 additions and 3 deletions.
dspy/teleprompt/finetune.py (6 changes: 3 additions & 3 deletions)
@@ -58,7 +58,7 @@ def __init__(self, metric=None, teacher_settings={}, multitask=True):
                                              teacher_settings=teacher_settings)
 
 
-    def compile(self, student, *, teacher=None, trainset, valset=None,
+    async def compile(self, student, *, teacher=None, trainset, valset=None,
                 target='t5-large', bsize=12, accumsteps=1, lr=5e-5, epochs=1, bf16=False, int8=False, peft=False, path_prefix=None):
 
         # It's usually better to supply a few-shot teacher, rather than uncompiled module (the student).
@@ -72,7 +72,7 @@ def compile(self, student, *, teacher=None, trainset, valset=None,
 
         for teacher in teachers:
             # Dummy compilation to get bootstraps.
-            compiled = self.teleprompter.compile(student, teacher=teacher, trainset=trainset)
+            compiled = await self.teleprompter.compile(student, teacher=teacher, trainset=trainset)
             multitask = self.multitask
 
             # Prepare finetune <prompt, completion> pairs.
@@ -141,7 +141,7 @@ def compile(self, student, *, teacher=None, trainset, valset=None,
             training_data_path = finetune_paths[name]
             compiler_config_ = dict(compiler_config)
             compiler_config_['save'] = compiler_config['save'] + '.' + name
-            best_ckpt_path = finetune_hf(training_data_path, target, compiler_config_)
+            best_ckpt_path = await finetune_hf(training_data_path, target, compiler_config_)
 
             print(f"#> Best checkpoint path: {best_ckpt_path} for {name}")
             finetune_models[name] = dsp.HFModel(model=target, checkpoint=best_ckpt_path)  # best_ckpt_path
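
For context, a minimal sketch (not part of this commit) of what a call site would look like once compile() is a coroutine: the SimpleQA module, the compile_student helper, and the commented-out driver lines are hypothetical illustrations, assuming the async variant of BootstrapFinetune.compile shown in the diff above.

    import asyncio

    import dspy
    from dspy.teleprompt import BootstrapFinetune


    class SimpleQA(dspy.Module):
        # Toy student program, used here only to illustrate the call site.
        def __init__(self):
            super().__init__()
            self.generate = dspy.Predict("question -> answer")

        def forward(self, question):
            return self.generate(question=question)


    async def compile_student(trainset):
        # With the async compile() from this commit, the call must be awaited
        # inside a coroutine instead of being invoked synchronously.
        teleprompter = BootstrapFinetune(metric=None, multitask=True)
        return await teleprompter.compile(
            SimpleQA(),
            trainset=trainset,
            target='t5-large',
            bsize=12,
            epochs=1,
        )


    # Example driver, left commented out (building a real trainset of
    # dspy.Example objects and configuring a teacher LM is out of scope here):
    # trainset = [dspy.Example(question="...", answer="...").with_inputs("question")]
    # compiled = asyncio.run(compile_student(trainset))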
