nblinh63 committed · Commit 0b175c6 · verified · 1 Parent(s): 5d4b2ae

End of training
README.md CHANGED
@@ -103,7 +103,7 @@ xformers_attention: true
 
 This model is a fine-tuned version of [unsloth/codegemma-2b](https://huggingface.co/unsloth/codegemma-2b) on the None dataset.
 It achieves the following results on the evaluation set:
- - Loss: 3.2219
+ - Loss: 3.2094
 
 ## Model description
 
@@ -135,7 +135,7 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:----:|:---------------:|
- | 5.4915        | 0.0001 | 10   | 3.2219          |
+ | 5.4834        | 0.0001 | 10   | 3.2094          |
 
 
 ### Framework versions
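For context, this README documents a LoRA adapter trained on top of unsloth/codegemma-2b. Below is a minimal sketch of how such an adapter could be loaded for inference with transformers and peft; the adapter repo ID is a hypothetical placeholder, since this commit does not name the repository.

```python
# Minimal sketch (not from this commit): load a LoRA adapter trained on
# unsloth/codegemma-2b and run a quick generation to sanity-check it.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_MODEL = "unsloth/codegemma-2b"
ADAPTER_REPO = "nblinh63/your-adapter-repo"  # hypothetical placeholder repo ID

tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
base = AutoModelForCausalLM.from_pretrained(BASE_MODEL)

# Attach the fine-tuned LoRA weights (adapter_model.safetensors) to the base model.
model = PeftModel.from_pretrained(base, ADAPTER_REPO)
model.eval()

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```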
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "up_proj",
     "v_proj",
-    "q_proj",
     "down_proj",
+    "o_proj",
+    "q_proj",
     "k_proj",
-    "o_proj"
+    "gate_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:26e6a2af36bfa8b535273c346b14bd6c51b172310778b397af486cd1f115a3ca
+ oid sha256:0d4745f22bb5b4b92181a482699721fe54e94a820b7511859e6e825f50f499c3
 size 78537274
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:f203d788dcc1cb27d3a9cb6ca33c37381ca08e19638acaf63fdb4fabc2fa5881
+ oid sha256:6e96ac6c096fc63d55f2a7ba265544c17c03103b79fa0617d548584a096ba130
 size 78480072
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:301b4d47f616d4b6cc1ace74be117c5f4a68933209583c1dacdf35a34bd0efa9
+ oid sha256:6211da5b9065e2d926a02cd5a7aa101b89889d60f100c5e254fa041e8ff99426
 size 6776
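The three binary files above are Git LFS pointers, so each diff only swaps the sha256 oid while the recorded size stays the same. Below is a minimal sketch, assuming the artifacts have been downloaded to the current directory, of verifying them against the new oids and sizes recorded in this commit.

```python
# Sketch: verify locally downloaded artifacts against the sha256 oids recorded
# in their Git LFS pointers (the "oid sha256:<hex>" lines above). The local
# paths are assumptions; adjust them to wherever the files were downloaded.
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = {
    # filename -> (new oid from this commit, expected size in bytes)
    "adapter_model.bin": ("0d4745f22bb5b4b92181a482699721fe54e94a820b7511859e6e825f50f499c3", 78537274),
    "adapter_model.safetensors": ("6e96ac6c096fc63d55f2a7ba265544c17c03103b79fa0617d548584a096ba130", 78480072),
    "training_args.bin": ("6211da5b9065e2d926a02cd5a7aa101b89889d60f100c5e254fa041e8ff99426", 6776),
}

for name, (oid, size) in expected.items():
    path = Path(name)  # assumes the file sits in the current directory
    ok = path.stat().st_size == size and sha256_of(path) == oid
    print(f"{name}: {'OK' if ok else 'MISMATCH'}")
```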