SimonMA committed
Commit c719caf
1 Parent(s): 3c18ff0

Training in progress, step 25

adapter_config.json CHANGED
@@ -21,12 +21,12 @@
   "revision": null,
   "target_modules": [
     "k_proj",
-    "q_proj",
-    "gate_proj",
-    "down_proj",
     "o_proj",
+    "up_proj",
+    "gate_proj",
     "v_proj",
-    "up_proj"
+    "down_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a912ffc457e5be08808573f311fb55057fd62a8a1b9adaf18c7a7a580c382f27
+oid sha256:d4dd50a89f6bdb247642f8720f93360ccf97334a0b90ac40d32700721922e11e
 size 2332095256
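The adapter weights, the tfevents logs, and training_args.bin are tracked with Git LFS, so these diffs only show pointer files: a spec version line, the blob's sha256 oid, and its size in bytes. A small sketch, assuming the real blob has been downloaded locally, of checking the pointer above against the file (the local path is a placeholder):

# Sketch: verify a downloaded blob against the Git LFS pointer shown above.
import hashlib

def lfs_pointer_matches(blob_path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the blob's SHA-256 digest and byte size match the pointer."""
    h = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_oid and size == expected_size

# Values copied from the new adapter_model.safetensors pointer:
ok = lfs_pointer_matches(
    "adapter_model.safetensors",
    "d4dd50a89f6bdb247642f8720f93360ccf97334a0b90ac40d32700721922e11e",
    2332095256,
)
print("pointer matches:", ok)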
runs/Mar26_06-45-04_ef3683ac757a/events.out.tfevents.1711435513.ef3683ac757a.951.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:56445db638bed265419c30ca2360e890c6610d04be9d00a3f999f7cdcda13a08
-size 8556
+oid sha256:7ae0167f8fcb117b3dfdb3f680672c4d68995719f6f1cd4ae6bd47a569fe8694
+size 9038
runs/Mar26_07-02-38_ef3683ac757a/events.out.tfevents.1711436567.ef3683ac757a.9298.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91b644efb2a32ec330b8827ac78bd62ccfe5ba944dcdc95617efb2d740eef4f2
+size 5411
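The two runs/... entries are TensorBoard event logs written during training, one per run on this machine. A hedged sketch of inspecting the logged scalars with the tensorboard package; the directory name is copied from the diff, and the available tags depend on what the run actually logged:

# Sketch: dump the scalar series from a TensorBoard event directory.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Mar26_07-02-38_ef3683ac757a")
acc.Reload()  # parse the events.out.tfevents.* files in the directory

for tag in acc.Tags().get("scalars", []):
    for event in acc.Scalars(tag):  # each event carries wall_time, step, value
        print(tag, event.step, event.value)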
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:80b1399101a1f2148c5def843e11892dd85d9855f962f015257028721c3fcd34
+oid sha256:9ff0dbc3dc2bcb2c876190adee0b2926750808992e31dc4b1180b964684529df
 size 4984