Upload config.json

config.json  CHANGED  +8 -8
@@ -1,18 +1,18 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "model_checkpoints/gigantic_fukuiraptor/epoch_9/",
   "architectures": [
     "Florence2ForConditionalGeneration"
   ],
   "auto_map": {
-    "AutoConfig": "
-    "AutoModelForCausalLM": "
+    "AutoConfig": "configuration_florence2.Florence2Config",
+    "AutoModelForCausalLM": "modeling_florence2.Florence2ForConditionalGeneration"
   },
-  "bos_token_id":
-  "eos_token_id":
+  "bos_token_id": 0,
+  "eos_token_id": 2,
   "ignore_index": -100,
   "is_encoder_decoder": true,
   "model_type": "florence2",
-  "pad_token_id":
+  "pad_token_id": 1,
   "projection_dim": 1024,
   "text_config": {
     "_name_or_path": "",
@@ -105,7 +105,7 @@
     "vocab_size": 51289
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.41.2",
   "vision_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -160,7 +160,7 @@
     "length_penalty": 1.0,
     "max_length": 20,
     "min_length": 0,
-    "model_type": "",
+    "model_type": "davit",
     "no_repeat_ngram_size": 0,
     "num_beam_groups": 1,
     "num_beams": 1,
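Because the updated "auto_map" points AutoConfig and AutoModelForCausalLM at custom code modules (configuration_florence2.Florence2Config and modeling_florence2.Florence2ForConditionalGeneration) shipped alongside this config, loading the checkpoint through transformers requires trust_remote_code=True. The sketch below is a minimal example of that, assuming this config.json sits in a Hugging Face model repo together with those two Python files; the repo id is a placeholder, not something stated in this commit.

```python
# Minimal loading sketch, assuming the config lives in a repo that also ships
# configuration_florence2.py and modeling_florence2.py.
# REPO_ID is a hypothetical placeholder; substitute the actual repository name.
from transformers import AutoConfig, AutoModelForCausalLM

REPO_ID = "your-namespace/your-florence2-checkpoint"  # hypothetical repo id

# trust_remote_code=True is needed because "auto_map" resolves AutoConfig and
# AutoModelForCausalLM to the custom Florence-2 classes bundled with the repo.
config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(REPO_ID, trust_remote_code=True)

# Sanity-check the values added in this commit.
cfg = config.to_dict()
assert cfg["bos_token_id"] == 0
assert cfg["eos_token_id"] == 2
assert cfg["pad_token_id"] == 1
assert cfg["vision_config"]["model_type"] == "davit"
print(type(model).__name__)  # Florence2ForConditionalGeneration
```

AutoModelForCausalLM is used here (rather than a seq2seq auto class) simply because that is the auto class the "auto_map" entry in this config registers for the model.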