gmastrapas koukandre committed on
Commit
a4f49a3
1 Parent(s): 1bae062

feat-remove-unnecessary-code (#4)


- update config.json (64a336fa04cbed58cf3fc3c31a97694bae659c89)
- Update config.json (bd7d64c9d5d68ac273a50606b814e9bbaa4bdccb)


Co-authored-by: Andreas Koukounas <koukandre@users.noreply.huggingface.co>
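Note on the cleanup below: the keys stripped from text_config and vision_config are the generic defaults that the base transformers.PretrainedConfig class serializes into nested sub-configs (generation settings such as num_beams or top_k, placeholder label maps, etc.); none of them are specific to the JinaCLIP text or vision towers. A minimal sketch of how to confirm that, assuming transformers 4.36 (the version recorded in this config):

    from transformers import PretrainedConfig

    # Defaults of the base config class; keys such as "add_cross_attention",
    # "num_beams", "top_k" or "id2label" match exactly what this commit removes
    # from text_config and vision_config.
    defaults = PretrainedConfig().to_dict()
    for key in ("add_cross_attention", "num_beams", "top_k", "id2label"):
        print(key, "->", defaults[key])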

Files changed (1)
  1. config.json +0 -102
config.json CHANGED
@@ -14,125 +14,36 @@
   "projection_dim": 768,
   "text_config": {
     "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "early_stopping": false,
     "embed_dim": 768,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
     "hf_model_config_kwargs": {
       "use_flash_attn": false
     },
     "hf_model_name_or_path": "jinaai/jina-bert-flash-implementation",
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
     "model_type": "jina_clip_text",
-    "no_repeat_ngram_size": 0,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_return_sequences": 1,
     "output_attentions": false,
     "output_hidden_states": false,
     "output_scores": false,
     "pad_token_id": null,
     "pooler_type": "mean_pooler",
-    "prefix": null,
-    "problem_type": null,
     "proj_bias": false,
     "proj_type": null,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
     "transformers_version": "4.36.2",
-    "typical_p": 1.0,
     "use_bfloat16": false
   },
   "torch_dtype": "float32",
   "transformers_version": null,
   "vision_config": {
     "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "drop_path_rate": 0.0,
-    "early_stopping": false,
     "embed_dim": 768,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
     "fused_layer_norm": false,
     "head_width": 64,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
     "image_size": 224,
     "intp_freq": false,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
     "layers": 12,
-    "length_penalty": 1.0,
     "ls_init_value": null,
-    "max_length": 20,
-    "min_length": 0,
     "mlp_ratio": 2.6667,
     "model_type": "jina_clip_vision",
     "naive_swiglu": true,
-    "no_repeat_ngram_size": 0,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_return_sequences": 1,
     "output_attentions": false,
     "output_hidden_states": false,
     "output_scores": false,
@@ -147,25 +58,12 @@
     "pt_hw_seq_len": 14,
     "qkv_bias": true,
     "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
     "return_dict": true,
     "return_dict_in_generate": false,
     "rope_embeddings": true,
-    "sep_token_id": null,
     "subln": true,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
     "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
     "transformers_version": "4.36.2",
-    "typical_p": 1.0,
     "use_bfloat16": false,
     "width": 768,
     "x_attention": false
 
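After the trim, the nested configs keep only the JinaCLIP-specific fields, and the removed keys are simply restored from class defaults when the config is reloaded. A quick sanity check, assuming the config lives in the jinaai/jina-clip-v1 repo (the repo id is an assumption here) and that the repo's custom config classes are pulled in via trust_remote_code:

    from transformers import AutoConfig

    # Repo id assumed for illustration; trust_remote_code loads the custom
    # JinaCLIP config classes referenced by the repo's auto_map.
    config = AutoConfig.from_pretrained("jinaai/jina-clip-v1", trust_remote_code=True)

    # Values kept in the trimmed config.json are still there...
    print(config.text_config.model_type)    # jina_clip_text
    print(config.vision_config.image_size)  # 224
    # ...and a removed key falls back to the PretrainedConfig default.
    print(config.text_config.num_beams)     # 1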