From f32b361c8e3febb79cc25330e85d564eccc63872 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Monk=20=28looking=20for=20PhD=20Fall=E2=80=9924=29?=
Date: Thu, 18 Apr 2024 22:43:33 +0530
Subject: [PATCH 1/2] Create qlora.yml

---
 examples/llama-3/qlora.yml | 66 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 66 insertions(+)
 create mode 100644 examples/llama-3/qlora.yml

diff --git a/examples/llama-3/qlora.yml b/examples/llama-3/qlora.yml
new file mode 100644
index 000000000..3c81cd7b7
--- /dev/null
+++ b/examples/llama-3/qlora.yml
@@ -0,0 +1,66 @@
+base_model: meta-llama/Meta-Llama-3-8B
+model_type: AutoModelForCausalLM
+tokenizer_type: AutoTokenizer
+
+load_in_8bit: false
+load_in_4bit: true
+strict: false
+
+datasets:
+  - path: aaditya/alpaca_subset_1
+    type: alpaca
+dataset_prepared_path:
+val_set_size: 0.05
+output_dir: ./qlora-out
+
+adapter: qlora
+lora_model_dir:
+
+sequence_len: 4096
+sample_packing: true
+pad_to_sequence_len: true
+
+lora_r: 32
+lora_alpha: 16
+lora_dropout: 0.05
+lora_target_modules:
+lora_target_linear: true
+lora_fan_in_fan_out:
+
+wandb_project:
+wandb_entity:
+wandb_watch:
+wandb_name:
+wandb_log_model:
+
+gradient_accumulation_steps: 4
+micro_batch_size: 2
+num_epochs: 4
+optimizer: paged_adamw_32bit
+lr_scheduler: cosine
+learning_rate: 0.0002
+
+train_on_inputs: false
+group_by_length: false
+bf16: auto
+fp16:
+tf32: false
+
+gradient_checkpointing: true
+early_stopping_patience:
+resume_from_checkpoint:
+local_rank:
+logging_steps: 1
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 10
+evals_per_epoch: 4
+eval_table_size:
+saves_per_epoch: 1
+debug:
+deepspeed:
+weight_decay: 0.0
+fsdp:
+fsdp_config:
+special_tokens:

From 031d83b30d690a0e6801a557f7bb6675ff0e1627 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Monk=20=28looking=20for=20PhD=20Fall=E2=80=9924=29?=
Date: Thu, 18 Apr 2024 23:02:57 +0530
Subject: [PATCH 2/2] Update qlora.yml

---
 examples/llama-3/qlora.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/llama-3/qlora.yml b/examples/llama-3/qlora.yml
index 3c81cd7b7..9cedee8ee 100644
--- a/examples/llama-3/qlora.yml
+++ b/examples/llama-3/qlora.yml
@@ -10,7 +10,7 @@ datasets:
   - path: aaditya/alpaca_subset_1
     type: alpaca
 dataset_prepared_path:
-val_set_size: 0.05
+val_set_size: 0
 output_dir: ./qlora-out
 
 adapter: qlora
@@ -64,3 +64,4 @@ weight_decay: 0.0
 fsdp:
 fsdp_config:
 special_tokens:
+  pad_token: "<|end_of_text|>"
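
For context, the file wires up the standard QLoRA recipe: the frozen base weights are loaded 4-bit via bitsandbytes (load_in_4bit: true), trainable LoRA adapters are attached to every linear projection (adapter: qlora with lora_target_linear: true, rank 32, alpha 16), and the paged 32-bit AdamW optimizer from the QLoRA paper absorbs memory spikes. The load-bearing keys, annotated below; this restates lines already in the patch under axolotl's documented semantics, it is not a further change:

    load_in_4bit: true            # quantize frozen base weights to 4-bit
    adapter: qlora                # LoRA adapters on top of the quantized base
    lora_r: 32                    # adapter rank
    lora_alpha: 16                # adapter scaling factor
    lora_target_linear: true      # attach adapters to all linear layers
    optimizer: paged_adamw_32bit  # paged optimizer states, per the QLoRA paper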
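
A note on the combined effect of the two patches: PATCH 2/2 sets val_set_size: 0 but leaves evals_per_epoch: 4 from PATCH 1/2 untouched. Axolotl's config validation in this period generally rejects eval scheduling options when there is no validation split to evaluate on, so the file as patched is likely to fail at startup when launched the usual way (accelerate launch -m axolotl.cli.train examples/llama-3/qlora.yml). A minimal sketch of the two self-consistent variants, assuming that validation behavior; pick one:

    # Variant A: keep a held-out split, as in PATCH 1/2
    val_set_size: 0.05
    evals_per_epoch: 4

    # Variant B: train-only, matching PATCH 2/2's intent
    val_set_size: 0
    evals_per_epoch: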
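
On the pad_token added in PATCH 2/2: the Meta-Llama-3-8B tokenizer ships with <|begin_of_text|> and <|end_of_text|> but no dedicated pad token, and pad_to_sequence_len: true needs one for batching. Reusing the existing <|end_of_text|> token, rather than adding a new <|pad|>, keeps the 128256-entry embedding table at its original size, so no new weights are introduced. The patched block restated with explanatory comments:

    special_tokens:
      # Llama 3 defines no pad token of its own; reusing <|end_of_text|>
      # avoids growing the vocabulary and resizing the embedding matrix.
      pad_token: "<|end_of_text|>"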
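
On the batch settings from PATCH 1/2: micro_batch_size: 2 with gradient_accumulation_steps: 4 yields an effective batch of 4 x 2 = 8 sequences per optimizer step per GPU, scaled further by the number of GPUs under data parallelism, and with sample_packing: true each sequence is a 4096-token pack of several shorter Alpaca examples. Annotated for reference:

    gradient_accumulation_steps: 4  # forward/backward passes per optimizer step
    micro_batch_size: 2             # sequences per pass, per GPU
    sequence_len: 4096              # each packed sequence holds several examples
    sample_packing: true            # pack short examples to fill the context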