{
  "_name_or_path": "/mnt/petrelfs/tianjie/projects/Qwen-VL/vision_model/",
  "architectures": [
    "QWenForClassification"
  ],
  "attn_dropout_prob": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_qwen.QWenConfig",
    "AutoModelForCausalLM": "modeling_qwen.QWenForClassification"
  },
  "bf16": true,
  "class_name": [
    "\u5199\u5b9e\u98ce\u683c",
    "\u56fe\u6807\u98ce\u683c",
    "\u5361\u901a\u98ce\u683c",
    "\u827a\u672f\u98ce\u683c",
    "\u9ed1\u767d\u7b80\u7b14\u98ce\u683c",
    "\u7eaf\u6587\u5b57\u98ce\u683c",
    "3D\u98ce\u683c",
    "\u6c34\u5f69\u98ce\u683c",
    "\u7d20\u63cf\u98ce\u683c",
    "\u50cf\u7d20\u98ce\u683c",
    "\u63d2\u753b\u98ce\u683c",
    "\u79d1\u6280\u98ce\u683c",
    "\u4e2d\u56fd\u98ce\u98ce\u683c"
  ],
  "emb_dropout_prob": 0.0,
  "fp16": false,
  "fp32": false,
  "gamma": 0.9,
  "hidden_size": 4096,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2",
    "3": "LABEL_3",
    "4": "LABEL_4",
    "5": "LABEL_5",
    "6": "LABEL_6",
    "7": "LABEL_7",
    "8": "LABEL_8",
    "9": "LABEL_9",
    "10": "LABEL_10",
    "11": "LABEL_11",
    "12": "LABEL_12"
  },
  "initializer_range": 0.02,
  "intermediate_size": 22016,
  "kv_channels": 128,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_10": 10,
    "LABEL_11": 11,
    "LABEL_12": 12,
    "LABEL_2": 2,
    "LABEL_3": 3,
    "LABEL_4": 4,
    "LABEL_5": 5,
    "LABEL_6": 6,
    "LABEL_7": 7,
    "LABEL_8": 8,
    "LABEL_9": 9
  },
  "layer_norm_epsilon": 1e-06,
  "max_position_embeddings": 8192,
  "model_type": "qwen",
  "no_bias": true,
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "onnx_safe": null,
  "rotary_emb_base": 10000,
  "rotary_pct": 1.0,
  "scale_attn_weights": true,
  "seq_length": 2048,
  "tie_word_embeddings": false,
  "tokenizer_type": "QWenTokenizer",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.34.0",
  "use_cache": false,
  "use_dynamic_ntk": true,
  "use_flash_attn": false,
  "use_logn_attn": true,
  "visual": {
    "heads": 16,
    "image_size": 448,
    "image_start_id": 151857,
    "layers": 48,
    "mlp_ratio": 4.9231,
    "output_dim": 4096,
    "patch_size": 14,
    "width": 1664
  },
  "vocab_size": 151936
}
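For context, here is a minimal loading sketch, assuming the JSON above is saved as config.json in a checkpoint directory together with the custom configuration_qwen.py and modeling_qwen.py files named in "auto_map"; the directory path below is hypothetical, and the attribute access on class_name/visual assumes the custom QWenConfig keeps those fields as plain config attributes.

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical local checkpoint directory containing config.json plus the
# custom configuration_qwen.py / modeling_qwen.py referenced by "auto_map".
model_dir = "/path/to/qwen-vl-style-classifier"

# trust_remote_code=True is required: "auto_map" points at custom
# QWenConfig / QWenForClassification classes shipped with the checkpoint,
# not at classes built into transformers.
config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)

# Custom fields from the JSON (assumed to be exposed as config attributes):
# the 13 style labels and the ViT settings (448px images, 14px patches).
print(config.class_name)
print(config.visual["image_size"], config.visual["patch_size"])

# The checkpoint registers its classification head under AutoModelForCausalLM
# in "auto_map", so it is loaded through that auto class even though the
# architecture is a classifier.
model = AutoModelForCausalLM.from_pretrained(
    model_dir,
    config=config,
    trust_remote_code=True,
)
```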