Commit

Added config file for hparam tuning
Ayuei committed Mar 30, 2023
1 parent 30c4215 commit 91c921a
Showing 2 changed files with 41 additions and 1 deletion.
40 changes: 40 additions & 0 deletions examples/hparam_tuning/hparam_cfg.json
@@ -0,0 +1,40 @@
{
  "lr": {
    "type": "float",
    "low": 1e-6,
    "high": 1e-2,
    "log": true
  },
  "warmup_steps": {
    "type": "int",
    "low": 0,
    "high": 1000
  },
  "optimizer": {
    "type": "categorical",
    "choices": [
      "Adam",
      "RAdam",
      "Yogi"
    ]
  },
  "scheduler": {
    "type": "categorical",
    "choices": [
      "constantlr",
      "warmupconstant",
      "warmuplinear",
      "warmupcosine",
      "warmupcosinewithhardrestarts"
    ]
  },
  "model_name": {
    "type": "categorical",
    "choices": [
      "squeezebert/squeezebert-uncased"
    ]
  },
  "batch_size": 32,
  "epochs": 5,
  "loss_fn": "OnlineContrastiveLoss"
}
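
Each top-level key in hparam_cfg.json is either a fixed value ("batch_size", "epochs", "loss_fn") or a search-space specification giving a "type" plus either bounds ("low"/"high", optionally "log") or "choices". Below is a minimal sketch of how such a file could drive an Optuna-style search, assuming the trainer samples one value per entry per trial; the helper suggest_from_config and the placeholder objective are hypothetical illustrations, not the repository's actual implementation:

import json

import optuna


def suggest_from_config(trial: optuna.Trial, config: dict) -> dict:
    """Turn each config entry into a fixed value or an Optuna suggestion."""
    hparams = {}
    for name, spec in config.items():
        if not isinstance(spec, dict):
            # Plain entries such as "batch_size": 32 are treated as fixed.
            hparams[name] = spec
        elif spec["type"] == "float":
            hparams[name] = trial.suggest_float(
                name, spec["low"], spec["high"], log=spec.get("log", False)
            )
        elif spec["type"] == "int":
            hparams[name] = trial.suggest_int(name, spec["low"], spec["high"])
        elif spec["type"] == "categorical":
            hparams[name] = trial.suggest_categorical(name, spec["choices"])
        else:
            raise ValueError(f"Unknown hparam type: {spec['type']}")
    return hparams


def objective(trial: optuna.Trial) -> float:
    with open("hparam_cfg.json") as f:
        hparams = suggest_from_config(trial, json.load(f))
    # ... train and evaluate with `hparams`, return the validation metric ...
    return 0.0  # placeholder


study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=20)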
2 changes: 1 addition & 1 deletion examples/hparam_tuning/hparam_tuning_from_config.py
@@ -48,7 +48,7 @@ def load_dataset():

 if __name__ == "__main__":
     hparam_config = HparamConfig.from_json(
-        "hparam_config.json"
+        "hparam_cfg.json"
     )

     trainer = SentenceTransformerHparamTrainer(
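
This one-line change points the example at the hparam_cfg.json added above rather than the previously referenced hparam_config.json. As a rough, hypothetical approximation of what HparamConfig.from_json might do (the real class presumably validates the specs as well), the sketch below simply loads the raw dict; the class name HparamConfigSketch is made up for illustration:

import json
from dataclasses import dataclass


@dataclass
class HparamConfigSketch:
    """Hypothetical stand-in for HparamConfig; illustration only."""

    specs: dict

    @classmethod
    def from_json(cls, path: str) -> "HparamConfigSketch":
        # The path is presumably resolved against the working directory,
        # so the example would be run from inside examples/hparam_tuning/.
        with open(path) as f:
            return cls(specs=json.load(f))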
