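# Config for LoRA forget-training (unlearning) of open_llama_7b on the
# lluvecwonv/WikiMIA_QA dataset. Values of the form ${...} look like
# OmegaConf/Hydra interpolations and are assumed to be resolved at load time.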
model_family: llama-7b
model_path: openlm-research/open_llama_7b
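# LoRA adapter hyperparameters. In common PEFT implementations the low-rank
# update is scaled by alpha/r, so r=8 with alpha=32 gives an effective scale of 4.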
LoRA:
  r: 8
  alpha: 32
  dropout: 0.05
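# Optimizer settings: the effective batch size is the per-device batch size
# multiplied by gradient_accumulation_steps (4).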
lr: 2.0e-05
split: WikiMIA_QA_256
data_path: lluvecwonv/WikiMIA_QA
gradient_accumulation_steps: 4
num_epochs: 10
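# grad_ascent maximizes (rather than minimizes) the language-modeling loss on
# the forget data, the simplest gradient-ascent unlearning objective.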
forget_loss: grad_ascent
file_path: /root/kdu/output/openlm-research/open_llama_7b_lluvecwonv_WikiMIA_QA_WikiMIA_QA_256_logit/
unlearned_model: 0
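# Assuming OmegaConf-style interpolation, save_dir resolves to
# "llama-7b/WikiMIA_QA_256_0_10" for the values above.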
save_dir: ${model_family}/${split}_${unlearned_model}_${num_epochs}
overwrite_dir: true
weight_decay: 0.01
save_model: true
eval_while_train: false
eval_only: false
seed: 42
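# Rendezvous port for the distributed launcher (e.g. torchrun); this reading
# of the key is an assumption based on its common use.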
master_port: 18765
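# Evaluation settings. ${..key} appears to be a relative interpolation that
# reads the corresponding key from the root level of this config.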
eval:
  model_path: ${..model_path}
  model_family: ${..model_family}
  save_dir: ${..save_dir}
  data_path: lluvecwonv/WikiMIA_QA
  split: ${..split}
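  # Two eval passes: eval_log and eval_log_forget appear to cover the general
  # and forget data respectively; the i-th entry of question_key/answer_key
  # names the dataset columns used by the i-th task.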
  eval_task:
    - eval_log
    - eval_log_forget
  question_key:
    - Query
    - Query
  answer_key:
    - Answer
    - Answer
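  # Decoding settings: with max_new_tokens null, max_length (200 tokens total)
  # is assumed to bound generation length.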
  generation:
    max_length: 200
    max_new_tokens: null
  save_generated_text: true
  ds_size: 300
  overwrite: true
  use_pretrained: false
  batch_size: 4
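  # Path to precomputed eval results of a retain-only model, used by some
  # unlearning pipelines for aggregate forget-quality metrics; null disables
  # that comparison. This interpretation of the key is an assumption.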
  retain_result: null