Text Generation
Transformers
Safetensors
zamba2
Inference Endpoints
pglo committed on
Commit 73d23ec
1 Parent(s): 019014a

Upload folder using huggingface_hub

model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea6b3831ea8ea8f6a3b0979c770bdc12f7c86918ff579b901f3715a47625996b
-size 4994052608
+oid sha256:2411d6714dd16ea76e396ab38840bb9fb6000159871230d72170209427c9c650
+size 4994053048
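
The shard itself lives in Git LFS, so the diff above only touches the pointer file: a new content hash and a size 440 bytes larger. That small growth is consistent with the index change below, where each block's fused attention weights are replaced by four separate projection tensors, so the shard's safetensors header gains a few entries while the weight bytes stay essentially the same. A minimal sketch of that kind of conversion (not necessarily the script used here), assuming the fused weight concatenates q, k and v in equal parts along dim 0:

```python
import torch
from safetensors.torch import load_file, save_file

# Hypothetical conversion behind this commit: split each block's fused
# self_attn.linear_qkv weight into q/k/v projections and rename
# linear_proj -> o_proj. The equal three-way split along dim 0 is an
# assumption; it only holds if q, k and v have the same total width.
state = load_file("model-00001-of-00002.safetensors")
converted = {}
for name, tensor in state.items():
    if name.endswith("self_attn.linear_qkv.weight"):
        base = name[: -len("linear_qkv.weight")]
        q, k, v = torch.chunk(tensor, 3, dim=0)
        # clone() so the three pieces stop sharing storage; safetensors
        # refuses to serialize tensors that alias each other.
        converted[base + "q_proj.weight"] = q.clone()
        converted[base + "k_proj.weight"] = k.clone()
        converted[base + "v_proj.weight"] = v.clone()
    elif name.endswith("self_attn.linear_proj.weight"):
        converted[name.replace("linear_proj", "o_proj")] = tensor
    else:
        converted[name] = tensor
save_file(converted, "model-00001-of-00002.safetensors")
```

Under grouped-query attention the three parts would have different widths, so the split sizes would have to come from the model config rather than an equal chunk.
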
model.safetensors.index.json CHANGED
@@ -25,8 +25,10 @@
   "model.blocks.0.feed_forward.linear_fc2.weight": "model-00001-of-00002.safetensors",
   "model.blocks.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.blocks.0.pre_ff_layernorm.weight": "model-00001-of-00002.safetensors",
-  "model.blocks.0.self_attn.linear_proj.weight": "model-00001-of-00002.safetensors",
-  "model.blocks.0.self_attn.linear_qkv.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
   "model.blocks.1.feed_forward.linear_fc1.weight": "model-00001-of-00002.safetensors",
   "model.blocks.1.feed_forward.linear_fc1_lora_A_list.0.weight": "model-00001-of-00002.safetensors",
   "model.blocks.1.feed_forward.linear_fc1_lora_A_list.1.weight": "model-00001-of-00002.safetensors",
@@ -49,8 +51,10 @@
   "model.blocks.1.feed_forward.linear_fc2.weight": "model-00001-of-00002.safetensors",
   "model.blocks.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.blocks.1.pre_ff_layernorm.weight": "model-00001-of-00002.safetensors",
-  "model.blocks.1.self_attn.linear_proj.weight": "model-00001-of-00002.safetensors",
-  "model.blocks.1.self_attn.linear_qkv.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+  "model.blocks.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
   "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
   "model.final_layernorm.weight": "model-00002-of-00002.safetensors",
   "model.linear_layers.0.weight": "model-00002-of-00002.safetensors",