Joseph717171
committed on
Commit 63e5bbc
1 parent: 726910d
Update config.json
My dude! You forgot to put the rope_theta in the config.json. Your model works past 900 tokens now. 😋
- config.json +1 -0
config.json
CHANGED
@@ -29,6 +29,7 @@
         "original_max_position_embeddings": 8192,
         "rope_type": "llama3"
     },
+    "rope_theta": 500000.0,
     "tie_word_embeddings": false,
     "torch_dtype": "bfloat16",
     "transformers_version": "4.42.3",
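
For context on why the missing key breaks generation past a few hundred tokens: rope_theta is the base used to compute the rotary-embedding frequencies, and when the key is absent transformers falls back to its Llama default of 10000.0 instead of the 500000.0 this checkpoint was trained with. A minimal sketch of the effect, assuming a typical head_dim of 128 (not stated in this config):

import torch

# Sketch: how rope_theta sets the rotary-embedding frequency base.
# head_dim = 128 is an assumption (common for Llama-3-style models),
# not something taken from this config.json.
head_dim = 128
rope_theta = 500000.0    # the value this commit adds
default_theta = 10000.0  # transformers' fallback when the key is missing

def inv_freq(theta: float) -> torch.Tensor:
    # Standard RoPE inverse frequencies: 1 / theta^(2i / d)
    exponents = torch.arange(0, head_dim, 2, dtype=torch.float32) / head_dim
    return 1.0 / (theta ** exponents)

# The lowest frequencies (longest wavelengths) differ by roughly 50x,
# so position encodings diverge badly from what the model saw in
# training once the context grows, which matches the breakage the
# commit message describes.
print(inv_freq(rope_theta)[-1], inv_freq(default_theta)[-1])

A quick way to confirm the fix landed is to load the config with transformers.AutoConfig.from_pretrained (model path yours) and check that cfg.rope_theta prints 500000.0.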