Update config.json
config.json (+3 -1)
@@ -82,6 +82,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
+    "transformers_version": "4.22.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": true,
@@ -178,6 +179,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
+    "transformers_version": "4.22.0.dev0",
     "typical_p": 1.0,
     "use_absolute_embeddings": false,
     "use_bfloat16": false,
@@ -188,5 +190,5 @@
   "pad_token_id": 1,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version":
+  "transformers_version": null
 }
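The commit adds `"transformers_version": "4.22.0.dev0"` to what appear to be two nested sub-config blocks (the first two hunks share keys like `top_p` and `typical_p`, and only the final hunk closes the file) and sets the top-level `transformers_version` to `null`, replacing a previous value that is truncated in this view. A minimal sketch, not part of the commit, for verifying the result against a local copy of `config.json`; it uses only the standard library, so no assumptions about the `transformers` API are needed:

```python
import json

# Sketch: recursively walk the config and print every
# "transformers_version" field, including those inside
# nested sub-configs, with its path in the JSON tree.
def find_versions(node, path="config"):
    if isinstance(node, dict):
        for key, value in node.items():
            if key == "transformers_version":
                print(f"{path}.{key} = {value!r}")
            find_versions(value, f"{path}.{key}")
    elif isinstance(node, list):
        for i, item in enumerate(node):
            find_versions(item, f"{path}[{i}]")

# Assumes the updated config.json sits in the working directory.
with open("config.json") as f:
    find_versions(json.load(f))
```

After this commit, the walk should report the two nested `'4.22.0.dev0'` entries plus `None` at the top level.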