Fix tie_word_embeddings
config.json: +1 -0
@@ -24,6 +24,7 @@
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
+  "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.19.0.dev0",
   "use_cache": true,
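Without this key, transformers defaults tie_word_embeddings to true and shares the LM head weights with the input embeddings; setting it to false keeps them as separate tensors, which checkpoints trained with untied embeddings require. A minimal sketch of how to verify the fix after loading, assuming a T5-style seq2seq checkpoint; the repo id below is a placeholder, not the actual model:

# Verify that the LM head is no longer tied to the input embeddings.
# "your-org/your-model" is a placeholder repo id.
from transformers import AutoConfig, AutoModelForSeq2SeqLM

config = AutoConfig.from_pretrained("your-org/your-model")
assert config.tie_word_embeddings is False  # set by this commit

model = AutoModelForSeq2SeqLM.from_pretrained("your-org/your-model")
in_emb = model.get_input_embeddings().weight
out_emb = model.get_output_embeddings().weight
# Untied weights live in separate tensors, so their storage pointers differ.
print(in_emb.data_ptr() != out_emb.data_ptr())  # True when untied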