Upload MambaForCausalLMwithTargetLMHead
Browse files — config.json: +1 -0
config.json
CHANGED
@@ -26,6 +26,7 @@
   "ssm_cfg": {},
   "state_size": 16,
   "target_vocab_size": 128256,
+  "tie_word_embeddings": false,
   "time_step_floor": 0.0001,
   "time_step_init_scheme": "random",
   "time_step_max": 0.1,