SimonMA committed
Commit 40ca6f9
1 Parent(s): 1903ea5

End of training

README.md CHANGED
@@ -27,17 +27,18 @@ print(output["generated_text"])
 
  ## Training procedure
 
+ 
 
 
  This model was trained with SFT.
 
  ### Framework versions
 
- - TRL: 0.12.2
- - Transformers: 4.46.3
+ - TRL: 0.13.0
+ - Transformers: 4.47.1
  - Pytorch: 2.5.1+cu121
  - Datasets: 3.2.0
- - Tokenizers: 0.20.3
+ - Tokenizers: 0.21.0
 
  ## Citations
 
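The hunk context ends in the card's quick-start line, `print(output["generated_text"])`, and the body records that the model was trained with TRL's SFT on the bumped framework versions. A minimal inference sketch along those lines, assuming the adapter repo can be loaded directly by a `text-generation` pipeline (peft installed); the repo id and prompt are placeholders, not values from this commit:

```python
# Minimal sketch, assuming a transformers text-generation pipeline as in the card's
# quick-start; the repo id and prompt are placeholders, not taken from this commit.
from transformers import pipeline

generator = pipeline("text-generation", model="SimonMA/example-sft-adapter")  # hypothetical id
output = generator("Write a function that reverses a string.", max_new_tokens=64)[0]
print(output["generated_text"])
```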
 
adapter_config.json CHANGED
@@ -23,12 +23,12 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "q_proj",
- "up_proj",
- "down_proj",
  "o_proj",
+ "up_proj",
  "v_proj",
  "k_proj",
+ "q_proj",
+ "down_proj",
  "gate_proj"
  ],
  "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a1ce3f79e35c7f7812a6ab7b1130e384ed61a50e057f55081e85d5a156d9ef21
+ oid sha256:849bf486e8d8a7f83a04b71c31713b161d519d61c63b41bd97421e4fa3d07390
  size 2332095256
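Only the Git LFS pointer changes here: the payload keeps the same 2,332,095,256-byte size but gets a new `oid`. One way to check a downloaded copy against the pointer, assuming the file sits in the current directory:

```python
# Sketch: verify a locally downloaded LFS object against the pointer's sha256 and size.
# The local path is an assumption; the oid and size come from the pointer above.
import hashlib
from pathlib import Path

path = Path("adapter_model.safetensors")
expected_oid = "849bf486e8d8a7f83a04b71c31713b161d519d61c63b41bd97421e4fa3d07390"
expected_size = 2_332_095_256

assert path.stat().st_size == expected_size, "size mismatch"
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == expected_oid, "sha256 mismatch"
```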
runs/Dec22_14-04-53_b7e549e10e5f/events.out.tfevents.1734876308.b7e549e10e5f.5931.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2dc5e4d9ac2bde2aca0b5797d97192cccc5ae10771ed3e3eaaaa9bd284d8227f
+ size 182270
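The added file is a TensorBoard event log from the training run. If the `tensorboard` package is installed, its logged scalars can be listed roughly as below; the tag names depend on the Trainer configuration and are not recorded in this diff:

```python
# Sketch: list the scalar tags logged in the new TensorBoard event file.
# The local path is an assumption about where the run directory was downloaded.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Dec22_14-04-53_b7e549e10e5f")
acc.Reload()
print(acc.Tags()["scalars"])  # e.g. train/loss, train/learning_rate (assumed names)
```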
tokenizer_config.json CHANGED
@@ -78,6 +78,7 @@
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "eot_token": "▁<EOT>",
+ "extra_special_tokens": {},
  "fill_token": "<FILL_ME>",
  "legacy": null,
  "middle_token": "▁<MID>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e27b34c197e15df32f32379c11b3fde527439c8709751b5f548d576bae9abcc8
- size 5560
+ oid sha256:282286dc8bbe6445ca0ecef09efaf67874c790156e11533cc93bc11bf0ccef78
+ size 5624
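`training_args.bin` is a pickled copy of the Trainer's arguments, so only its LFS pointer shows up in the diff. A short sketch for inspecting a downloaded copy; the path is assumed, and loading requires transformers to be installed so the pickled class can be resolved:

```python
# Sketch: inspect the pickled training arguments saved by the Trainer.
# weights_only=False is needed because this is a pickled Python object, not tensors;
# the local path is an assumption.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)                        # e.g. TrainingArguments or SFTConfig
print(args.num_train_epochs, args.learning_rate)  # values are not shown in this diff
```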