{
"arch": "mini_gpt4_llama_v2",
"architectures": [
"MiniGPT4_llama_v2"
],
"chat_template": true,
"ckpt": "checkpoints/video_llama_checkpoint_last.pth",
"device": "cuda",
"drop_path_rate": 0,
"end_sym": "</s>",
"freeze_qformer": true,
"freeze_vit": true,
"img_size": 224,
"length": 50,
"llama_model": "meta-llama/Llama-2-7b-chat-hf",
"lora_alpha": 16,
"lora_r": 64,
"low_resource": true,
"max_context_len": 3600,
"max_txt_len": 256,
"model_type": "minigpt4_video",
"num_query_token": 32,
"prompt": "",
"torch_dtype": "float32",
"transformers_version": "4.42.3",
"use_grad_checkpoint": true,
"use_grad_checkpoint_llm": true,
"vit_precision": "fp16",
"vit_model": "eva_clip_g",
"token_pooling": true,
"lora_target_modules" : ["q_proj","v_proj"],
"lora_dropout": 0.05,
"remove_template": false,
"prompt_path":""
}
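
For reference, a minimal sketch of reading this config in Python. The file name and keys match the JSON above; the path and the choice of fields to print are illustrative assumptions, not part of any official loader.

import json

# Read the MiniGPT4-Video model config (assumes config.json is in the
# current directory; adjust the path to your checkout).
with open("config.json") as f:
    cfg = json.load(f)

# A few fields a loader typically consumes: base LLM, LoRA setup, vision tower.
print(cfg["llama_model"])                 # meta-llama/Llama-2-7b-chat-hf
print(cfg["lora_r"], cfg["lora_alpha"])   # 64 16
print(cfg["vit_model"], cfg["img_size"])  # eva_clip_g 224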