valhalla committed on
Commit
3727878
1 Parent(s): a2ab452

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -2
config.json CHANGED
@@ -59,6 +59,7 @@
59
  "pad_token_id": 1,
60
  "prefix": null,
61
  "problem_type": null,
 
62
  "pruned_heads": {},
63
  "remove_invalid_values": false,
64
  "repetition_penalty": 1.0,
@@ -83,7 +84,8 @@
83
  "hidden_size": 768,
84
  "intermediate_size": 3072,
85
  "num_attention_heads": 12,
86
- "num_hidden_layers": 12
 
87
  },
88
  "torch_dtype": "float32",
89
  "transformers_version": null,
@@ -141,6 +143,7 @@
141
  "patch_size": 14,
142
  "prefix": null,
143
  "problem_type": null,
 
144
  "pruned_heads": {},
145
  "remove_invalid_values": false,
146
  "repetition_penalty": 1.0,
@@ -166,6 +169,7 @@
166
  "intermediate_size": 4096,
167
  "num_attention_heads": 16,
168
  "num_hidden_layers": 24,
169
- "patch_size": 14
 
170
  }
171
  }
 
59
  "pad_token_id": 1,
60
  "prefix": null,
61
  "problem_type": null,
62
+ "projection_dim": 768,
63
  "pruned_heads": {},
64
  "remove_invalid_values": false,
65
  "repetition_penalty": 1.0,
 
84
  "hidden_size": 768,
85
  "intermediate_size": 3072,
86
  "num_attention_heads": 12,
87
+ "num_hidden_layers": 12,
88
+ "projection_dim": 768
89
  },
90
  "torch_dtype": "float32",
91
  "transformers_version": null,
 
143
  "patch_size": 14,
144
  "prefix": null,
145
  "problem_type": null,
146
+ "projection_dim": 768,
147
  "pruned_heads": {},
148
  "remove_invalid_values": false,
149
  "repetition_penalty": 1.0,
 
169
  "intermediate_size": 4096,
170
  "num_attention_heads": 16,
171
  "num_hidden_layers": 24,
172
+ "patch_size": 14,
173
+ "projection_dim": 768
174
  }
175
  }