asadmasad committed
Commit 3b4e87d
1 Parent(s): ab91ab2

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
     "v_proj",
-    "k_proj",
-    "gate_proj",
     "up_proj",
-    "o_proj",
-    "down_proj"
+    "gate_proj",
+    "k_proj",
+    "q_proj",
+    "down_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b6819012bbff4af16838bbd45f52611da49d8e2985289e7ea4e1d67664a2651a
+oid sha256:4eeec07987dda3e891df486bb0abb113b4a1be3d77d5b528cdab646ff93eaef2
 size 75013128
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:553dd821aceaf0babfc0d7e6b600e25378636daf246c057cd953c1755b3b4f7c
+oid sha256:1fffb9a7bafd9d73d2ce154b5f041bbc463a595f4842c3bd3b644e8db7ebe813
 size 4792