saattrupdan committed
Commit • 010d4ca
1 Parent(s): cfb10ca
Add `max_length` to model config
This is required by the conversational pipeline.
config.json  +2 -1

config.json CHANGED
@@ -32,5 +32,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.22.1",
   "use_cache": true,
-  "vocab_size": 64000
+  "vocab_size": 64000,
+  "max_length": 2048
 }
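A hedged sketch of where the added field is used: the Hugging Face conversational pipeline (as it existed around transformers 4.22) falls back on `model.config.max_length` when trimming conversation history and capping generation, so setting it to 2048 gives the pipeline a workable default. The model id below is a placeholder, not taken from this repo.

```python
from transformers import Conversation, pipeline

# Placeholder model id; substitute the repo this commit belongs to.
# Assumption: transformers 4.22.x, where the conversational pipeline
# consults model.config.max_length (now 2048 via this commit).
chatbot = pipeline("conversational", model="saattrupdan/<this-model>")

conversation = Conversation("Hello, how are you?")
conversation = chatbot(conversation)
print(conversation.generated_responses[-1])
```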