|
{
  "arch": "mini_gpt4_llama_v2",
  "architectures": [
    "MiniGPT4_llama_v2"
  ],
  "chat_template": true,
  "ckpt": "checkpoints/video_llama_checkpoint_last.pth",
  "device": "cuda",
  "drop_path_rate": 0,
  "end_sym": "</s>",
  "freeze_qformer": true,
  "freeze_vit": true,
  "img_size": 224,
  "length": 50,
  "llama_model": "meta-llama/Llama-2-7b-chat-hf",
  "lora_alpha": 16,
  "lora_r": 64,
  "low_resource": true,
  "max_context_len": 3600,
  "max_txt_len": 256,
  "model_type": "minigpt4_video",
  "num_query_token": 32,
  "prompt": "",
  "torch_dtype": "float32",
  "transformers_version": "4.42.3",
  "use_grad_checkpoint": true,
  "use_grad_checkpoint_llm": true,
  "vit_precision": "fp16",
  "vit_model": "eva_clip_g",
  "token_pooling": true,
  "lora_target_modules": ["q_proj", "v_proj"],
  "lora_dropout": 0.05,
  "remove_template": false
}
|
|