TinyLlama-1.1B-intermediate-step-1195k-token-2.5T_huth_adalora_lr5e-05_bs2_epoch5_wd0.01 / adapter_config.json
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "TinyLlama/TinyLlama-1.1B-intermediate-step-1195k-token-2.5T",
  "beta1": 0.85,
  "beta2": 0.85,
  "bias": "none",
  "deltaT": 1,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "init_r": 12,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "orth_reg_weight": 0.5,
  "peft_type": "ADALORA",
  "r": 8,
  "rank_pattern": null,
  "revision": null,
  "target_modules": [
    "q_proj",
    "v_proj"
  ],
  "target_r": 8,
  "task_type": "CAUSAL_LM",
  "tfinal": 0,
  "tinit": 0,
  "total_step": null
}
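This adapter_config.json describes an AdaLoRA adapter (PEFT) applied to the q_proj and v_proj projections of the TinyLlama-1.1B base model, with init_r=12 as the starting rank and target_r=8 as the average rank budget the rank allocator prunes toward during training. A minimal sketch of loading the adapter for inference with transformers and peft is shown below; the adapter_id is an assumption based on the directory name above and may need to point at your local checkout or hub repo id.

# Minimal sketch: load the AdaLoRA adapter on top of the TinyLlama base model.
# adapter_id is assumed from the directory name above; adjust to your actual path or repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "TinyLlama/TinyLlama-1.1B-intermediate-step-1195k-token-2.5T"
adapter_id = "TinyLlama-1.1B-intermediate-step-1195k-token-2.5T_huth_adalora_lr5e-05_bs2_epoch5_wd0.01"

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)

# Wrap the base model with the adapter described by adapter_config.json.
# "inference_mode": true in the config means the adapter weights load frozen.
model = PeftModel.from_pretrained(base_model, adapter_id)
model.eval()

At inference time the AdaLoRA scheduling fields (tinit, tfinal, deltaT, total_step) are informational only; they control the rank-pruning schedule during training and have no effect once the adapter is loaded in inference mode.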