SimonMA committed
Commit 5e9b7d6
1 Parent(s): a43ac42

End of training

README.md CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.3056
+ - Loss: 0.3110
 
 ## Model description
 
@@ -50,229 +50,104 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:-----:|:---------------:|
- | 0.2053 | 2.3256 | 15000 | 0.2969 |
- | 0.1942 | 2.3333 | 15050 | 0.2944 |
- | 0.1871 | 2.3411 | 15100 | 0.2949 |
- | 0.1845 | 2.3488 | 15150 | 0.2954 |
- | 0.2108 | 2.3566 | 15200 | 0.2950 |
- | 0.2065 | 2.3643 | 15250 | 0.2951 |
- | 0.193 | 2.3721 | 15300 | 0.2961 |
- | 0.1947 | 2.3798 | 15350 | 0.2953 |
- | 0.1922 | 2.3876 | 15400 | 0.2949 |
- | 0.1777 | 2.3953 | 15450 | 0.2949 |
- | 0.212 | 2.4031 | 15500 | 0.2949 |
- | 0.1962 | 2.4109 | 15550 | 0.2949 |
- | 0.1789 | 2.4186 | 15600 | 0.2951 |
- | 0.2183 | 2.4264 | 15650 | 0.2923 |
- | 0.1962 | 2.4341 | 15700 | 0.2947 |
- | 0.1907 | 2.4419 | 15750 | 0.2928 |
- | 0.1936 | 2.4496 | 15800 | 0.2956 |
- | 0.2086 | 2.4574 | 15850 | 0.2933 |
- | 0.1895 | 2.4651 | 15900 | 0.2959 |
- | 0.2157 | 2.4729 | 15950 | 0.2932 |
- | 0.1897 | 2.4806 | 16000 | 0.2926 |
- | 0.1862 | 2.4884 | 16050 | 0.2937 |
- | 0.1899 | 2.4961 | 16100 | 0.2955 |
- | 0.187 | 2.5039 | 16150 | 0.2970 |
- | 0.2126 | 2.5116 | 16200 | 0.2941 |
- | 0.1973 | 2.5194 | 16250 | 0.2933 |
- | 0.1743 | 2.5271 | 16300 | 0.2930 |
- | 0.1958 | 2.5349 | 16350 | 0.2938 |
- | 0.2162 | 2.5426 | 16400 | 0.2919 |
- | 0.1872 | 2.5504 | 16450 | 0.2936 |
- | 0.1821 | 2.5581 | 16500 | 0.2940 |
- | 0.2193 | 2.5659 | 16550 | 0.2940 |
- | 0.1983 | 2.5736 | 16600 | 0.2943 |
- | 0.2121 | 2.5814 | 16650 | 0.2941 |
- | 0.1969 | 2.5891 | 16700 | 0.2923 |
- | 0.1963 | 2.5969 | 16750 | 0.2921 |
- | 0.2042 | 2.6047 | 16800 | 0.2938 |
- | 0.1921 | 2.6124 | 16850 | 0.2914 |
- | 0.2081 | 2.6202 | 16900 | 0.2917 |
- | 0.1711 | 2.6279 | 16950 | 0.2923 |
- | 0.1897 | 2.6357 | 17000 | 0.2918 |
- | 0.1917 | 2.6434 | 17050 | 0.2933 |
- | 0.1991 | 2.6512 | 17100 | 0.2909 |
- | 0.2055 | 2.6589 | 17150 | 0.2930 |
- | 0.1932 | 2.6667 | 17200 | 0.2907 |
- | 0.2043 | 2.6744 | 17250 | 0.2937 |
- | 0.1922 | 2.6822 | 17300 | 0.2922 |
- | 0.1785 | 2.6899 | 17350 | 0.2922 |
- | 0.2337 | 2.6977 | 17400 | 0.2908 |
- | 0.1933 | 2.7054 | 17450 | 0.2922 |
- | 0.2012 | 2.7132 | 17500 | 0.2914 |
- | 0.1959 | 2.7209 | 17550 | 0.2910 |
- | 0.1933 | 2.7287 | 17600 | 0.2882 |
- | 0.1824 | 2.7364 | 17650 | 0.2889 |
- | 0.2016 | 2.7442 | 17700 | 0.2898 |
- | 0.2024 | 2.7519 | 17750 | 0.2915 |
- | 0.2101 | 2.7597 | 17800 | 0.2888 |
- | 0.1782 | 2.7674 | 17850 | 0.2908 |
- | 0.2047 | 2.7752 | 17900 | 0.2902 |
- | 0.195 | 2.7829 | 17950 | 0.2895 |
- | 0.2122 | 2.7907 | 18000 | 0.2884 |
- | 0.2099 | 2.7984 | 18050 | 0.2869 |
- | 0.2054 | 2.8062 | 18100 | 0.2882 |
- | 0.193 | 2.8140 | 18150 | 0.2884 |
- | 0.187 | 2.8217 | 18200 | 0.2895 |
- | 0.1997 | 2.8295 | 18250 | 0.2883 |
- | 0.1885 | 2.8372 | 18300 | 0.2896 |
- | 0.1957 | 2.8450 | 18350 | 0.2871 |
- | 0.1905 | 2.8527 | 18400 | 0.2879 |
- | 0.1933 | 2.8605 | 18450 | 0.2880 |
- | 0.1953 | 2.8682 | 18500 | 0.2871 |
- | 0.205 | 2.8760 | 18550 | 0.2865 |
- | 0.191 | 2.8837 | 18600 | 0.2870 |
- | 0.1903 | 2.8915 | 18650 | 0.2870 |
- | 0.1897 | 2.8992 | 18700 | 0.2873 |
- | 0.1966 | 2.9070 | 18750 | 0.2871 |
- | 0.228 | 2.9147 | 18800 | 0.2875 |
- | 0.1948 | 2.9225 | 18850 | 0.2870 |
- | 0.1843 | 2.9302 | 18900 | 0.2859 |
- | 0.2037 | 2.9380 | 18950 | 0.2872 |
- | 0.2087 | 2.9457 | 19000 | 0.2855 |
- | 0.1777 | 2.9535 | 19050 | 0.2864 |
- | 0.1852 | 2.9612 | 19100 | 0.2866 |
- | 0.1879 | 2.9690 | 19150 | 0.2858 |
- | 0.2096 | 2.9767 | 19200 | 0.2848 |
- | 0.1846 | 2.9845 | 19250 | 0.2857 |
- | 0.1782 | 2.9922 | 19300 | 0.2859 |
- | 0.1762 | 3.0 | 19350 | 0.2864 |
- | 0.1339 | 3.0078 | 19400 | 0.3056 |
- | 0.1356 | 3.0155 | 19450 | 0.3067 |
- | 0.136 | 3.0233 | 19500 | 0.3073 |
- | 0.1389 | 3.0310 | 19550 | 0.3089 |
- | 0.1362 | 3.0388 | 19600 | 0.3084 |
- | 0.1384 | 3.0465 | 19650 | 0.3085 |
- | 0.1167 | 3.0543 | 19700 | 0.3092 |
- | 0.1291 | 3.0620 | 19750 | 0.3078 |
- | 0.1292 | 3.0698 | 19800 | 0.3092 |
- | 0.1257 | 3.0775 | 19850 | 0.3099 |
- | 0.1384 | 3.0853 | 19900 | 0.3088 |
- | 0.1355 | 3.0930 | 19950 | 0.3076 |
- | 0.1244 | 3.1008 | 20000 | 0.3088 |
- | 0.141 | 3.1085 | 20050 | 0.3082 |
- | 0.1398 | 3.1163 | 20100 | 0.3080 |
- | 0.1415 | 3.1240 | 20150 | 0.3085 |
- | 0.1521 | 3.1318 | 20200 | 0.3067 |
- | 0.1266 | 3.1395 | 20250 | 0.3097 |
- | 0.1254 | 3.1473 | 20300 | 0.3101 |
- | 0.1403 | 3.1550 | 20350 | 0.3053 |
- | 0.1395 | 3.1628 | 20400 | 0.3085 |
- | 0.1328 | 3.1705 | 20450 | 0.3074 |
- | 0.1381 | 3.1783 | 20500 | 0.3090 |
- | 0.1323 | 3.1860 | 20550 | 0.3058 |
- | 0.1299 | 3.1938 | 20600 | 0.3092 |
- | 0.1432 | 3.2016 | 20650 | 0.3074 |
- | 0.1399 | 3.2093 | 20700 | 0.3071 |
- | 0.1288 | 3.2171 | 20750 | 0.3076 |
- | 0.1464 | 3.2248 | 20800 | 0.3060 |
- | 0.1347 | 3.2326 | 20850 | 0.3066 |
- | 0.1336 | 3.2403 | 20900 | 0.3080 |
- | 0.1245 | 3.2481 | 20950 | 0.3069 |
- | 0.1305 | 3.2558 | 21000 | 0.3080 |
- | 0.1379 | 3.2636 | 21050 | 0.3050 |
- | 0.1269 | 3.2713 | 21100 | 0.3074 |
- | 0.1379 | 3.2791 | 21150 | 0.3067 |
- | 0.1348 | 3.2868 | 21200 | 0.3077 |
- | 0.1261 | 3.2946 | 21250 | 0.3116 |
- | 0.1354 | 3.3023 | 21300 | 0.3064 |
- | 0.1323 | 3.3101 | 21350 | 0.3061 |
- | 0.1255 | 3.3178 | 21400 | 0.3078 |
- | 0.135 | 3.3256 | 21450 | 0.3073 |
- | 0.1354 | 3.3333 | 21500 | 0.3070 |
- | 0.1391 | 3.3411 | 21550 | 0.3066 |
- | 0.1295 | 3.3488 | 21600 | 0.3086 |
- | 0.1215 | 3.3566 | 21650 | 0.3085 |
- | 0.1411 | 3.3643 | 21700 | 0.3072 |
- | 0.1393 | 3.3721 | 21750 | 0.3090 |
- | 0.132 | 3.3798 | 21800 | 0.3086 |
- | 0.1199 | 3.3876 | 21850 | 0.3089 |
- | 0.1349 | 3.3953 | 21900 | 0.3069 |
- | 0.1325 | 3.4031 | 21950 | 0.3084 |
- | 0.1247 | 3.4109 | 22000 | 0.3082 |
- | 0.1178 | 3.4186 | 22050 | 0.3062 |
- | 0.1218 | 3.4264 | 22100 | 0.3090 |
- | 0.131 | 3.4341 | 22150 | 0.3100 |
- | 0.1274 | 3.4419 | 22200 | 0.3070 |
- | 0.136 | 3.4496 | 22250 | 0.3083 |
- | 0.1458 | 3.4574 | 22300 | 0.3076 |
- | 0.1365 | 3.4651 | 22350 | 0.3087 |
- | 0.1362 | 3.4729 | 22400 | 0.3071 |
- | 0.1318 | 3.4806 | 22450 | 0.3073 |
- | 0.138 | 3.4884 | 22500 | 0.3067 |
- | 0.1413 | 3.4961 | 22550 | 0.3080 |
- | 0.1365 | 3.5039 | 22600 | 0.3087 |
- | 0.1236 | 3.5116 | 22650 | 0.3078 |
- | 0.1503 | 3.5194 | 22700 | 0.3063 |
- | 0.1437 | 3.5271 | 22750 | 0.3070 |
- | 0.1338 | 3.5349 | 22800 | 0.3070 |
- | 0.1256 | 3.5426 | 22850 | 0.3080 |
- | 0.1296 | 3.5504 | 22900 | 0.3074 |
- | 0.1286 | 3.5581 | 22950 | 0.3061 |
- | 0.1334 | 3.5659 | 23000 | 0.3075 |
- | 0.133 | 3.5736 | 23050 | 0.3058 |
- | 0.113 | 3.5814 | 23100 | 0.3060 |
- | 0.1238 | 3.5891 | 23150 | 0.3052 |
- | 0.1398 | 3.5969 | 23200 | 0.3044 |
- | 0.142 | 3.6047 | 23250 | 0.3054 |
- | 0.1257 | 3.6124 | 23300 | 0.3059 |
- | 0.1324 | 3.6202 | 23350 | 0.3052 |
- | 0.1376 | 3.6279 | 23400 | 0.3039 |
- | 0.1343 | 3.6357 | 23450 | 0.3037 |
- | 0.1264 | 3.6434 | 23500 | 0.3054 |
- | 0.1263 | 3.6512 | 23550 | 0.3062 |
- | 0.127 | 3.6589 | 23600 | 0.3054 |
- | 0.1187 | 3.6667 | 23650 | 0.3054 |
- | 0.1204 | 3.6744 | 23700 | 0.3059 |
- | 0.1148 | 3.6822 | 23750 | 0.3065 |
- | 0.1205 | 3.6899 | 23800 | 0.3073 |
- | 0.1277 | 3.6977 | 23850 | 0.3067 |
- | 0.1356 | 3.7054 | 23900 | 0.3067 |
- | 0.1518 | 3.7132 | 23950 | 0.3064 |
- | 0.1307 | 3.7209 | 24000 | 0.3062 |
- | 0.1344 | 3.7287 | 24050 | 0.3061 |
- | 0.1326 | 3.7364 | 24100 | 0.3065 |
- | 0.1246 | 3.7442 | 24150 | 0.3074 |
- | 0.1319 | 3.7519 | 24200 | 0.3071 |
- | 0.1436 | 3.7597 | 24250 | 0.3063 |
- | 0.1389 | 3.7674 | 24300 | 0.3064 |
- | 0.1275 | 3.7752 | 24350 | 0.3065 |
- | 0.1353 | 3.7829 | 24400 | 0.3061 |
- | 0.1289 | 3.7907 | 24450 | 0.3056 |
- | 0.1326 | 3.7984 | 24500 | 0.3053 |
- | 0.1244 | 3.8062 | 24550 | 0.3054 |
- | 0.1287 | 3.8140 | 24600 | 0.3056 |
- | 0.1168 | 3.8217 | 24650 | 0.3058 |
- | 0.1298 | 3.8295 | 24700 | 0.3055 |
- | 0.1231 | 3.8372 | 24750 | 0.3057 |
- | 0.1289 | 3.8450 | 24800 | 0.3059 |
- | 0.1184 | 3.8527 | 24850 | 0.3056 |
- | 0.1226 | 3.8605 | 24900 | 0.3055 |
- | 0.1593 | 3.8682 | 24950 | 0.3057 |
- | 0.128 | 3.8760 | 25000 | 0.3064 |
- | 0.1332 | 3.8837 | 25050 | 0.3058 |
- | 0.1397 | 3.8915 | 25100 | 0.3055 |
- | 0.1059 | 3.8992 | 25150 | 0.3058 |
- | 0.1281 | 3.9070 | 25200 | 0.3054 |
- | 0.1277 | 3.9147 | 25250 | 0.3056 |
- | 0.1119 | 3.9225 | 25300 | 0.3059 |
- | 0.1212 | 3.9302 | 25350 | 0.3059 |
- | 0.1131 | 3.9380 | 25400 | 0.3059 |
- | 0.1407 | 3.9457 | 25450 | 0.3059 |
- | 0.1286 | 3.9535 | 25500 | 0.3056 |
- | 0.1252 | 3.9612 | 25550 | 0.3056 |
- | 0.138 | 3.9690 | 25600 | 0.3056 |
- | 0.1245 | 3.9767 | 25650 | 0.3056 |
- | 0.1213 | 3.9845 | 25700 | 0.3056 |
- | 0.1276 | 3.9922 | 25750 | 0.3056 |
- | 0.1328 | 4.0 | 25800 | 0.3056 |
+ | 0.2019 | 2.5990 | 17000 | 0.2968 |
+ | 0.1928 | 2.6143 | 17100 | 0.2975 |
+ | 0.1992 | 2.6296 | 17200 | 0.2981 |
+ | 0.1975 | 2.6449 | 17300 | 0.2987 |
+ | 0.2003 | 2.6601 | 17400 | 0.2963 |
+ | 0.1847 | 2.6754 | 17500 | 0.2970 |
+ | 0.1945 | 2.6907 | 17600 | 0.2961 |
+ | 0.2057 | 2.7060 | 17700 | 0.2970 |
+ | 0.1782 | 2.7213 | 17800 | 0.2967 |
+ | 0.1813 | 2.7366 | 17900 | 0.2975 |
+ | 0.2001 | 2.7519 | 18000 | 0.2953 |
+ | 0.2074 | 2.7672 | 18100 | 0.2959 |
+ | 0.1957 | 2.7824 | 18200 | 0.2969 |
+ | 0.2006 | 2.7977 | 18300 | 0.2943 |
+ | 0.2021 | 2.8130 | 18400 | 0.2939 |
+ | 0.1862 | 2.8283 | 18500 | 0.2931 |
+ | 0.1951 | 2.8436 | 18600 | 0.2934 |
+ | 0.205 | 2.8589 | 18700 | 0.2936 |
+ | 0.2094 | 2.8742 | 18800 | 0.2919 |
+ | 0.1766 | 2.8895 | 18900 | 0.2935 |
+ | 0.2001 | 2.9048 | 19000 | 0.2931 |
+ | 0.1977 | 2.9200 | 19100 | 0.2941 |
+ | 0.1884 | 2.9353 | 19200 | 0.2922 |
+ | 0.1784 | 2.9506 | 19300 | 0.2927 |
+ | 0.1857 | 2.9659 | 19400 | 0.2921 |
+ | 0.1972 | 2.9812 | 19500 | 0.2926 |
+ | 0.1921 | 2.9965 | 19600 | 0.2929 |
+ | 0.1433 | 3.0118 | 19700 | 0.3114 |
+ | 0.1486 | 3.0271 | 19800 | 0.3115 |
+ | 0.1381 | 3.0423 | 19900 | 0.3147 |
+ | 0.1375 | 3.0576 | 20000 | 0.3122 |
+ | 0.1359 | 3.0729 | 20100 | 0.3144 |
+ | 0.133 | 3.0882 | 20200 | 0.3165 |
+ | 0.1346 | 3.1035 | 20300 | 0.3151 |
+ | 0.132 | 3.1188 | 20400 | 0.3169 |
+ | 0.1338 | 3.1341 | 20500 | 0.3137 |
+ | 0.1238 | 3.1494 | 20600 | 0.3160 |
+ | 0.1264 | 3.1647 | 20700 | 0.3146 |
+ | 0.1382 | 3.1799 | 20800 | 0.3139 |
+ | 0.136 | 3.1952 | 20900 | 0.3110 |
+ | 0.1321 | 3.2105 | 21000 | 0.3129 |
+ | 0.134 | 3.2258 | 21100 | 0.3148 |
+ | 0.134 | 3.2411 | 21200 | 0.3139 |
+ | 0.1338 | 3.2564 | 21300 | 0.3140 |
+ | 0.1317 | 3.2717 | 21400 | 0.3148 |
+ | 0.1281 | 3.2870 | 21500 | 0.3132 |
+ | 0.1279 | 3.3022 | 21600 | 0.3124 |
+ | 0.1355 | 3.3175 | 21700 | 0.3133 |
+ | 0.127 | 3.3328 | 21800 | 0.3129 |
+ | 0.1388 | 3.3481 | 21900 | 0.3157 |
+ | 0.1316 | 3.3634 | 22000 | 0.3134 |
+ | 0.1378 | 3.3787 | 22100 | 0.3127 |
+ | 0.1357 | 3.3940 | 22200 | 0.3131 |
+ | 0.1271 | 3.4093 | 22300 | 0.3141 |
+ | 0.1333 | 3.4246 | 22400 | 0.3142 |
+ | 0.1311 | 3.4398 | 22500 | 0.3133 |
+ | 0.1261 | 3.4551 | 22600 | 0.3138 |
+ | 0.1313 | 3.4704 | 22700 | 0.3129 |
+ | 0.1296 | 3.4857 | 22800 | 0.3135 |
+ | 0.1348 | 3.5010 | 22900 | 0.3134 |
+ | 0.1252 | 3.5163 | 23000 | 0.3131 |
+ | 0.1403 | 3.5316 | 23100 | 0.3117 |
+ | 0.1266 | 3.5469 | 23200 | 0.3126 |
+ | 0.135 | 3.5621 | 23300 | 0.3135 |
+ | 0.1344 | 3.5774 | 23400 | 0.3133 |
+ | 0.1452 | 3.5927 | 23500 | 0.3128 |
+ | 0.1285 | 3.6080 | 23600 | 0.3131 |
+ | 0.1235 | 3.6233 | 23700 | 0.3108 |
+ | 0.1255 | 3.6386 | 23800 | 0.3111 |
+ | 0.1335 | 3.6539 | 23900 | 0.3114 |
+ | 0.1397 | 3.6692 | 24000 | 0.3109 |
+ | 0.1359 | 3.6845 | 24100 | 0.3108 |
+ | 0.1269 | 3.6997 | 24200 | 0.3120 |
+ | 0.1345 | 3.7150 | 24300 | 0.3115 |
+ | 0.131 | 3.7303 | 24400 | 0.3111 |
+ | 0.1332 | 3.7456 | 24500 | 0.3115 |
+ | 0.1226 | 3.7609 | 24600 | 0.3123 |
+ | 0.1244 | 3.7762 | 24700 | 0.3114 |
+ | 0.123 | 3.7915 | 24800 | 0.3115 |
+ | 0.1302 | 3.8068 | 24900 | 0.3103 |
+ | 0.1291 | 3.8220 | 25000 | 0.3108 |
+ | 0.1335 | 3.8373 | 25100 | 0.3118 |
+ | 0.1251 | 3.8526 | 25200 | 0.3115 |
+ | 0.1321 | 3.8679 | 25300 | 0.3111 |
+ | 0.1249 | 3.8832 | 25400 | 0.3111 |
+ | 0.1324 | 3.8985 | 25500 | 0.3111 |
+ | 0.1236 | 3.9138 | 25600 | 0.3112 |
+ | 0.1399 | 3.9291 | 25700 | 0.3108 |
+ | 0.1255 | 3.9444 | 25800 | 0.3107 |
+ | 0.1462 | 3.9596 | 25900 | 0.3107 |
+ | 0.1217 | 3.9749 | 26000 | 0.3108 |
+ | 0.1238 | 3.9902 | 26100 | 0.3110 |
 
 
 ### Framework versions
 
- - PEFT 0.12.0
- - Transformers 4.44.2
+ - PEFT 0.13.0
+ - Transformers 4.45.1
 - Pytorch 2.4.1+cu121
- - Datasets 3.0.0
- - Tokenizers 0.19.1
+ - Datasets 3.0.1
+ - Tokenizers 0.20.0
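Since the card above describes a PEFT (LoRA) adapter on top of CodeLlama-7B-Instruct, a typical loading pattern looks like the sketch below. This is not part of the commit, and the adapter repo id is a placeholder, as the commit does not name the Hub path.

```python
# Minimal usage sketch, not part of this commit.
# "SimonMA/<adapter-repo>" is a placeholder -- substitute the actual Hub path.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

BASE = "codellama/CodeLlama-7b-Instruct-hf"

base_model = AutoModelForCausalLM.from_pretrained(
    BASE, torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(BASE)

# Attach the LoRA adapter produced by this training run.
model = PeftModel.from_pretrained(base_model, "SimonMA/<adapter-repo>")  # placeholder

prompt = "[INST] Write a Python function that reverses a string. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```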
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "k_proj",
-    "gate_proj",
-    "o_proj",
     "down_proj",
     "v_proj",
-    "q_proj"
+    "gate_proj",
+    "up_proj",
+    "k_proj",
+    "q_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:4a700f3ab144429aa5bccb81a0341e97f8a4ac23bd05bba88d4137e7f1c0d0e1
+ oid sha256:f26e9b8ef8d5169271ba680f48b9d0510d19bd9a087df95b5a61533aff1c1cb0
 size 2332095256
runs/Sep27_14-02-49_c7a37e839993/events.out.tfevents.1727445774.c7a37e839993.5984.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02cbabec9f2c937aea38af82786efe0a8d8daa0e87e923bae3e4ea989f21fb7c
+ size 87132
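The added file is a TensorBoard event log, stored as a Git LFS pointer. After fetching the real file (e.g. with `git lfs pull`), the logged scalars can be read programmatically; the tag name below is an assumption, so listing `ea.Tags()` first shows what was actually recorded.

```python
# Read logged scalars from the event file in this run directory.
# The "eval/loss" tag is an assumption; check ea.Tags() for the real names.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Sep27_14-02-49_c7a37e839993")
ea.Reload()
print(ea.Tags())  # lists the scalar tags actually present

for event in ea.Scalars("eval/loss"):  # assumed tag name
    print(event.step, event.value)
```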
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:0aebd9b1ef838cb16fa3ad38837049e149f15db8e0f3f3b037395dfab7a4203c
- size 5496
+ oid sha256:afb960b7c3e89a5d16b4b0f764487e8393fbeb4051a6f6785182b6b24d70f7b2
+ size 5560
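`training_args.bin` is a pickled `TrainingArguments` object, so after fetching it via LFS it can be inspected with `torch.load`; only unpickle files from sources you trust, and note the attribute names below assume the standard `transformers` class.

```python
# Inspect the serialized TrainingArguments (a pickled object -- only load
# files you trust). Requires a compatible transformers version installed.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```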