Hjgugugjhuhjggg committed on
Commit
7ee2374
1 Parent(s): 3825ef9

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "architectures": [
3
  "LlamaForCausalLM"
4
  ],
@@ -16,10 +17,9 @@
16
  "initializer_range": 0.02,
17
  "intermediate_size": 8192,
18
  "max_position_embeddings": 131072,
19
- "mlp_bias": false,
20
  "model_type": "llama",
21
- "num_attention_heads": 24,
22
- "num_hidden_layers": 28,
23
  "num_key_value_heads": 8,
24
  "pretraining_tp": 1,
25
  "rms_norm_eps": 1e-05,
 
1
  {
2
+ "_name_or_path": "lilmeaty/ksksksk",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
17
  "initializer_range": 0.02,
18
  "intermediate_size": 8192,
19
  "max_position_embeddings": 131072,
 
20
  "model_type": "llama",
21
+ "num_attention_heads": 32,
22
+ "num_hidden_layers": 42,
23
  "num_key_value_heads": 8,
24
  "pretraining_tp": 1,
25
  "rms_norm_eps": 1e-05,