multi-meeting-QnA/config/config.json
{
  "model": {
    "general": {
      "max_seq_length": 128,
      "seed": 42,
      "num_train_epochs": null,
      "max_steps": 60,
      "dataset_text_field": "text",
      "model_name": null,
      "repository": null
    },
    "peft": {
      "r": 16,
      "alpha": 16,
      "dropout": 0,
      "bias": "none",
      "rslora": false,
      "loftq_config": null
    },
    "sft": {
      "per_device_train_batch_size": 2,
      "gradient_accumulation_steps": 4,
      "learning_rate": 2e-4,
      "warmup_steps": 5,
      "logging_steps": 1,
      "optim": "adamw_8bit",
      "weight_decay": 0.01,
      "lr_scheduler_type": "linear",
      "dataset_num_proc": 2,
      "packing": false
    },
    "choices": [
      "unsloth/llama-3-8b-bnb-4bit",
      "unsloth/Qwen2-7B",
      "unsloth/gemma-7b-bnb-4bit",
      "unsloth/mistral-7b-v0.3-bnb-4bit"
    ]
  }
}
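
For context, a minimal sketch of how a config shaped like this might drive an Unsloth LoRA fine-tuning run, following the pattern used in Unsloth's example notebooks. The placeholder dataset, the target_modules list, load_in_4bit, and output_dir are assumptions not present in the config; since "model_name" is null, the sketch falls back to the first entry in "choices".

import json

from datasets import Dataset
from transformers import TrainingArguments
from trl import SFTTrainer
from unsloth import FastLanguageModel

# Load the three config sections defined above.
with open("config/config.json") as f:
    cfg = json.load(f)["model"]
general, peft, sft = cfg["general"], cfg["peft"], cfg["sft"]

# "model_name" is null in the config, so fall back to the first choice.
model_name = general["model_name"] or cfg["choices"][0]

model, tokenizer = FastLanguageModel.from_pretrained(
    model_name=model_name,
    max_seq_length=general["max_seq_length"],
    load_in_4bit=True,  # assumption: the bnb-4bit checkpoints suggest 4-bit loading
)

# Attach LoRA adapters from the "peft" section. target_modules is an
# assumption (the usual Unsloth notebook set); the config does not list it.
model = FastLanguageModel.get_peft_model(
    model,
    r=peft["r"],
    lora_alpha=peft["alpha"],
    lora_dropout=peft["dropout"],
    bias=peft["bias"],
    use_rslora=peft["rslora"],
    loftq_config=peft["loftq_config"],
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                    "gate_proj", "up_proj", "down_proj"],
    random_state=general["seed"],
)

# Placeholder dataset with a "text" column, matching dataset_text_field above.
dataset = Dataset.from_dict({"text": ["example meeting transcript ..."]})

trainer = SFTTrainer(
    model=model,
    tokenizer=tokenizer,
    train_dataset=dataset,
    dataset_text_field=general["dataset_text_field"],
    max_seq_length=general["max_seq_length"],
    dataset_num_proc=sft["dataset_num_proc"],
    packing=sft["packing"],
    args=TrainingArguments(
        per_device_train_batch_size=sft["per_device_train_batch_size"],
        gradient_accumulation_steps=sft["gradient_accumulation_steps"],
        warmup_steps=sft["warmup_steps"],
        max_steps=general["max_steps"],  # 60 steps; num_train_epochs is null
        learning_rate=sft["learning_rate"],
        logging_steps=sft["logging_steps"],
        optim=sft["optim"],
        weight_decay=sft["weight_decay"],
        lr_scheduler_type=sft["lr_scheduler_type"],
        seed=general["seed"],
        output_dir="outputs",  # assumption: not specified in the config
    ),
)
trainer.train()

With per_device_train_batch_size 2 and gradient_accumulation_steps 4, the effective batch size is 8, so max_steps 60 corresponds to roughly 480 training examples seen.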