# --- Environment setup ---
conda create -n sae python=3.10
conda activate sae   # activate the new env so pip installs into it, not the base env
pip install -r requirements.txt

# --- Step 1: cache model activations for the dataset ---
# NOTE(review): this step uses model id meta-llama/Llama-3.1-8B-Instruct while the
# later steps use meta-llama/Meta-Llama-3.1-8B-Instruct (both exist on the HF Hub;
# they are aliases of the same weights) — confirm the repo's scripts expect this mix.
CUDA_VISIBLE_DEVICES=0 python save_activations.py \
  --model_path meta-llama/Llama-3.1-8B-Instruct \
  --file_path data/freeinstruct/Raw-Magic.json \
  --dataset freeinstruct \
  --template_idx 0 \
  --tag realistic0

# --- Step 2: select candidate neurons from the saved activations ---
CUDA_VISIBLE_DEVICES=0 python select_neuron.py \
  --model_name Meta-Llama-3.1-8B-Instruct \
  --tag realistic0 \
  --dataset freeinstruct \
  --tag2 pb

# --- Step 3: Bayesian optimization over the selected neurons ---
CUDA_VISIBLE_DEVICES=0 python bayesian_optimization.py \
--model_path meta-llama/Meta-Llama-3.1-8B-Instruct \
--file_path data/freeinstruct/freeinstruct.json \
--dataset freeinstruct \
--method sae \
--length 30 \
--topk 15 \
--n_call 50 \
--tag realistic0 \
--tag2 pbmix \
--alpha 1 \
--demo \
--icl_path data/freeinstruct/ICL_example.jsonl \
--log bys_fi_top15_r1_few_pbmix_llama_sae0

# NOTE: before generating (next step), you need to add the optimization
# results to opt_value.py.
# --- Step 4: generate with the optimized steering configuration ---
CUDA_VISIBLE_DEVICES=0 python generation.py \
  --model_path meta-llama/Meta-Llama-3.1-8B-Instruct \
  --file_path data/freeinstruct/freeinstruct.json \
  --dataset 'freeinstruct' \
  --save_path 'results/freeinstruct/Meta-Llama-3.1-8B-Instruct/freeinstruct_opt_top15_pbmix_sae0.json' \
  --method sae \
  --tag realistic0 \
  --tag2 pbmix \
  --top 15 \
  --opt \
  --length 150 \
  --demo \
  --icl_path data/freeinstruct/ICL_example.jsonl \
  --max_new_tokens 128
# Fixes vs. previous version: --model_path had a typo ("mmeta-llama"), and
# "--length 150" was missing its trailing backslash, which broke the command:
# --demo, --icl_path and --max_new_tokens were parsed as separate commands.

# Make sure to put your OpenAI API key in a .env file at the project root
# (needed by the evaluation step below).
# --- Step 5: evaluate the generations (calls the OpenAI API) ---
# Paths were aligned with the generation step's --save_path: generation writes
# under results/freeinstruct/Meta-Llama-3.1-8B-Instruct/, but this command
# previously read from .../Llama-3.1-8B-Instruct/ (a file that is never written).
python evaluation.py \
  --file_path 'results/freeinstruct/Meta-Llama-3.1-8B-Instruct/freeinstruct_opt_top15_pbmix_sae0.json' \
  --save_path 'results/freeinstruct/Meta-Llama-3.1-8B-Instruct/freeinstruct_opt_top15_pbmix_sae0_eval.json' \
  --dataset 'freeinstruct' \
  --eval_type normal \
  --method sae