SimonMA committed on
Commit a43ac42
1 Parent(s): c389f32

End of training

README.md CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->

This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
It achieves the following results on the evaluation set:
- - Loss: 0.3065
+ - Loss: 0.3056

## Model description

@@ -50,235 +50,229 @@ The following hyperparameters were used during training:
50
 
51
  | Training Loss | Epoch | Step | Validation Loss |
52
  |:-------------:|:------:|:-----:|:---------------:|
53
- | 0.189 | 2.1545 | 13000 | 0.2986 |
54
- | 0.2052 | 2.1627 | 13050 | 0.2999 |
55
- | 0.2068 | 2.1710 | 13100 | 0.2962 |
56
- | 0.1851 | 2.1793 | 13150 | 0.2998 |
57
- | 0.2012 | 2.1876 | 13200 | 0.2979 |
58
- | 0.182 | 2.1959 | 13250 | 0.2975 |
59
- | 0.2076 | 2.2042 | 13300 | 0.2975 |
60
- | 0.1883 | 2.2125 | 13350 | 0.2966 |
61
- | 0.1748 | 2.2207 | 13400 | 0.2962 |
62
- | 0.1783 | 2.2290 | 13450 | 0.2982 |
63
- | 0.1898 | 2.2373 | 13500 | 0.2976 |
64
- | 0.2092 | 2.2456 | 13550 | 0.2966 |
65
- | 0.1828 | 2.2539 | 13600 | 0.2955 |
66
- | 0.1997 | 2.2622 | 13650 | 0.2974 |
67
- | 0.1966 | 2.2705 | 13700 | 0.2975 |
68
- | 0.2008 | 2.2788 | 13750 | 0.2955 |
69
- | 0.2011 | 2.2870 | 13800 | 0.2946 |
70
- | 0.1937 | 2.2953 | 13850 | 0.2979 |
71
- | 0.2004 | 2.3036 | 13900 | 0.2956 |
72
- | 0.1816 | 2.3119 | 13950 | 0.2971 |
73
- | 0.1935 | 2.3202 | 14000 | 0.2957 |
74
- | 0.1864 | 2.3285 | 14050 | 0.2962 |
75
- | 0.1914 | 2.3368 | 14100 | 0.2945 |
76
- | 0.1939 | 2.3450 | 14150 | 0.2948 |
77
- | 0.1912 | 2.3533 | 14200 | 0.2954 |
78
- | 0.1862 | 2.3616 | 14250 | 0.2956 |
79
- | 0.1951 | 2.3699 | 14300 | 0.2950 |
80
- | 0.1958 | 2.3782 | 14350 | 0.2942 |
81
- | 0.1838 | 2.3865 | 14400 | 0.2940 |
82
- | 0.2191 | 2.3948 | 14450 | 0.2941 |
83
- | 0.1995 | 2.4030 | 14500 | 0.2922 |
84
- | 0.1954 | 2.4113 | 14550 | 0.2952 |
85
- | 0.1959 | 2.4196 | 14600 | 0.2969 |
86
- | 0.1849 | 2.4279 | 14650 | 0.2934 |
87
- | 0.1842 | 2.4362 | 14700 | 0.2938 |
88
- | 0.1858 | 2.4445 | 14750 | 0.2943 |
89
- | 0.1827 | 2.4528 | 14800 | 0.2942 |
90
- | 0.198 | 2.4611 | 14850 | 0.2921 |
91
- | 0.2311 | 2.4693 | 14900 | 0.2951 |
92
- | 0.2 | 2.4776 | 14950 | 0.2932 |
93
- | 0.1871 | 2.4859 | 15000 | 0.2935 |
94
- | 0.1833 | 2.4942 | 15050 | 0.2947 |
95
- | 0.2216 | 2.5025 | 15100 | 0.2921 |
96
- | 0.1829 | 2.5108 | 15150 | 0.2924 |
97
- | 0.1772 | 2.5191 | 15200 | 0.2916 |
98
- | 0.1806 | 2.5273 | 15250 | 0.2930 |
99
- | 0.2089 | 2.5356 | 15300 | 0.2920 |
100
- | 0.216 | 2.5439 | 15350 | 0.2922 |
101
- | 0.1763 | 2.5522 | 15400 | 0.2908 |
102
- | 0.1835 | 2.5605 | 15450 | 0.2924 |
103
- | 0.1928 | 2.5688 | 15500 | 0.2925 |
104
- | 0.1982 | 2.5771 | 15550 | 0.2895 |
105
- | 0.195 | 2.5853 | 15600 | 0.2907 |
106
- | 0.1791 | 2.5936 | 15650 | 0.2904 |
107
- | 0.1782 | 2.6019 | 15700 | 0.2915 |
108
- | 0.1871 | 2.6102 | 15750 | 0.2898 |
109
- | 0.1892 | 2.6185 | 15800 | 0.2916 |
110
- | 0.1921 | 2.6268 | 15850 | 0.2914 |
111
- | 0.1799 | 2.6351 | 15900 | 0.2890 |
112
- | 0.1923 | 2.6434 | 15950 | 0.2870 |
113
- | 0.2109 | 2.6516 | 16000 | 0.2910 |
114
- | 0.2088 | 2.6599 | 16050 | 0.2890 |
115
- | 0.1968 | 2.6682 | 16100 | 0.2880 |
116
- | 0.1938 | 2.6765 | 16150 | 0.2904 |
117
- | 0.1839 | 2.6848 | 16200 | 0.2921 |
118
- | 0.1764 | 2.6931 | 16250 | 0.2907 |
119
- | 0.1696 | 2.7014 | 16300 | 0.2897 |
120
- | 0.2276 | 2.7096 | 16350 | 0.2892 |
121
- | 0.1968 | 2.7179 | 16400 | 0.2875 |
122
- | 0.1991 | 2.7262 | 16450 | 0.2884 |
123
- | 0.1683 | 2.7345 | 16500 | 0.2883 |
124
- | 0.1765 | 2.7428 | 16550 | 0.2879 |
125
- | 0.1988 | 2.7511 | 16600 | 0.2883 |
126
- | 0.1921 | 2.7594 | 16650 | 0.2887 |
127
- | 0.1799 | 2.7676 | 16700 | 0.2894 |
128
- | 0.1907 | 2.7759 | 16750 | 0.2895 |
129
- | 0.1805 | 2.7842 | 16800 | 0.2894 |
130
- | 0.1595 | 2.7925 | 16850 | 0.2884 |
131
- | 0.1758 | 2.8008 | 16900 | 0.2870 |
132
- | 0.1768 | 2.8091 | 16950 | 0.2868 |
133
- | 0.2019 | 2.8174 | 17000 | 0.2859 |
134
- | 0.1985 | 2.8257 | 17050 | 0.2868 |
135
- | 0.2022 | 2.8339 | 17100 | 0.2865 |
136
- | 0.213 | 2.8422 | 17150 | 0.2858 |
137
- | 0.1809 | 2.8505 | 17200 | 0.2859 |
138
- | 0.1735 | 2.8588 | 17250 | 0.2868 |
139
- | 0.1929 | 2.8671 | 17300 | 0.2866 |
140
- | 0.1908 | 2.8754 | 17350 | 0.2862 |
141
- | 0.2051 | 2.8837 | 17400 | 0.2857 |
142
- | 0.1711 | 2.8919 | 17450 | 0.2863 |
143
- | 0.1926 | 2.9002 | 17500 | 0.2863 |
144
- | 0.1923 | 2.9085 | 17550 | 0.2847 |
145
- | 0.198 | 2.9168 | 17600 | 0.2870 |
146
- | 0.1882 | 2.9251 | 17650 | 0.2872 |
147
- | 0.1932 | 2.9334 | 17700 | 0.2846 |
148
- | 0.1839 | 2.9417 | 17750 | 0.2852 |
149
- | 0.2221 | 2.9500 | 17800 | 0.2836 |
150
- | 0.1874 | 2.9582 | 17850 | 0.2844 |
151
- | 0.1677 | 2.9665 | 17900 | 0.2851 |
152
- | 0.1802 | 2.9748 | 17950 | 0.2832 |
153
- | 0.1873 | 2.9831 | 18000 | 0.2846 |
154
- | 0.187 | 2.9914 | 18050 | 0.2856 |
155
- | 0.1837 | 2.9997 | 18100 | 0.2866 |
156
- | 0.1303 | 3.0080 | 18150 | 0.3016 |
157
- | 0.125 | 3.0162 | 18200 | 0.3052 |
158
- | 0.1264 | 3.0245 | 18250 | 0.3051 |
159
- | 0.1199 | 3.0328 | 18300 | 0.3092 |
160
- | 0.1403 | 3.0411 | 18350 | 0.3044 |
161
- | 0.128 | 3.0494 | 18400 | 0.3089 |
162
- | 0.1466 | 3.0577 | 18450 | 0.3043 |
163
- | 0.1307 | 3.0660 | 18500 | 0.3046 |
164
- | 0.135 | 3.0742 | 18550 | 0.3071 |
165
- | 0.1282 | 3.0825 | 18600 | 0.3053 |
166
- | 0.1343 | 3.0908 | 18650 | 0.3073 |
167
- | 0.1211 | 3.0991 | 18700 | 0.3069 |
168
- | 0.1382 | 3.1074 | 18750 | 0.3058 |
169
- | 0.1347 | 3.1157 | 18800 | 0.3064 |
170
- | 0.1246 | 3.1240 | 18850 | 0.3087 |
171
- | 0.1278 | 3.1323 | 18900 | 0.3075 |
172
- | 0.1233 | 3.1405 | 18950 | 0.3083 |
173
- | 0.1393 | 3.1488 | 19000 | 0.3069 |
174
- | 0.124 | 3.1571 | 19050 | 0.3049 |
175
- | 0.138 | 3.1654 | 19100 | 0.3064 |
176
- | 0.1355 | 3.1737 | 19150 | 0.3073 |
177
- | 0.1401 | 3.1820 | 19200 | 0.3084 |
178
- | 0.1196 | 3.1903 | 19250 | 0.3107 |
179
- | 0.1248 | 3.1985 | 19300 | 0.3088 |
180
- | 0.1342 | 3.2068 | 19350 | 0.3077 |
181
- | 0.1436 | 3.2151 | 19400 | 0.3062 |
182
- | 0.1467 | 3.2234 | 19450 | 0.3079 |
183
- | 0.1246 | 3.2317 | 19500 | 0.3095 |
184
- | 0.1293 | 3.2400 | 19550 | 0.3068 |
185
- | 0.1236 | 3.2483 | 19600 | 0.3100 |
186
- | 0.1385 | 3.2565 | 19650 | 0.3074 |
187
- | 0.1194 | 3.2648 | 19700 | 0.3068 |
188
- | 0.1283 | 3.2731 | 19750 | 0.3077 |
189
- | 0.1412 | 3.2814 | 19800 | 0.3064 |
190
- | 0.1209 | 3.2897 | 19850 | 0.3070 |
191
- | 0.145 | 3.2980 | 19900 | 0.3068 |
192
- | 0.1416 | 3.3063 | 19950 | 0.3052 |
193
- | 0.1138 | 3.3146 | 20000 | 0.3057 |
194
- | 0.1296 | 3.3228 | 20050 | 0.3076 |
195
- | 0.1419 | 3.3311 | 20100 | 0.3093 |
196
- | 0.1243 | 3.3394 | 20150 | 0.3083 |
197
- | 0.1206 | 3.3477 | 20200 | 0.3082 |
198
- | 0.1279 | 3.3560 | 20250 | 0.3070 |
199
- | 0.13 | 3.3643 | 20300 | 0.3070 |
200
- | 0.1284 | 3.3726 | 20350 | 0.3064 |
201
- | 0.1259 | 3.3808 | 20400 | 0.3074 |
202
- | 0.1255 | 3.3891 | 20450 | 0.3052 |
203
- | 0.1227 | 3.3974 | 20500 | 0.3062 |
204
- | 0.1381 | 3.4057 | 20550 | 0.3066 |
205
- | 0.1304 | 3.4140 | 20600 | 0.3065 |
206
- | 0.1388 | 3.4223 | 20650 | 0.3071 |
207
- | 0.1227 | 3.4306 | 20700 | 0.3065 |
208
- | 0.1185 | 3.4388 | 20750 | 0.3062 |
209
- | 0.1289 | 3.4471 | 20800 | 0.3083 |
210
- | 0.1367 | 3.4554 | 20850 | 0.3089 |
211
- | 0.1241 | 3.4637 | 20900 | 0.3070 |
212
- | 0.1137 | 3.4720 | 20950 | 0.3092 |
213
- | 0.1177 | 3.4803 | 21000 | 0.3080 |
214
- | 0.1369 | 3.4886 | 21050 | 0.3073 |
215
- | 0.126 | 3.4969 | 21100 | 0.3072 |
216
- | 0.1174 | 3.5051 | 21150 | 0.3074 |
217
- | 0.1235 | 3.5134 | 21200 | 0.3077 |
218
- | 0.1297 | 3.5217 | 21250 | 0.3068 |
219
- | 0.1377 | 3.5300 | 21300 | 0.3078 |
220
- | 0.1215 | 3.5383 | 21350 | 0.3062 |
221
- | 0.1358 | 3.5466 | 21400 | 0.3069 |
222
- | 0.1123 | 3.5549 | 21450 | 0.3076 |
223
- | 0.1358 | 3.5631 | 21500 | 0.3080 |
224
- | 0.1396 | 3.5714 | 21550 | 0.3061 |
225
- | 0.1216 | 3.5797 | 21600 | 0.3075 |
226
- | 0.1162 | 3.5880 | 21650 | 0.3081 |
227
- | 0.128 | 3.5963 | 21700 | 0.3061 |
228
- | 0.1173 | 3.6046 | 21750 | 0.3070 |
229
- | 0.125 | 3.6129 | 21800 | 0.3065 |
230
- | 0.1262 | 3.6211 | 21850 | 0.3077 |
231
- | 0.1249 | 3.6294 | 21900 | 0.3073 |
232
- | 0.1212 | 3.6377 | 21950 | 0.3071 |
233
- | 0.1188 | 3.6460 | 22000 | 0.3065 |
234
- | 0.1241 | 3.6543 | 22050 | 0.3054 |
235
- | 0.1302 | 3.6626 | 22100 | 0.3064 |
236
- | 0.1329 | 3.6709 | 22150 | 0.3055 |
237
- | 0.1276 | 3.6792 | 22200 | 0.3059 |
238
- | 0.1336 | 3.6874 | 22250 | 0.3082 |
239
- | 0.1173 | 3.6957 | 22300 | 0.3091 |
240
- | 0.1205 | 3.7040 | 22350 | 0.3075 |
241
- | 0.1196 | 3.7123 | 22400 | 0.3080 |
242
- | 0.1128 | 3.7206 | 22450 | 0.3066 |
243
- | 0.1188 | 3.7289 | 22500 | 0.3079 |
244
- | 0.1154 | 3.7372 | 22550 | 0.3076 |
245
- | 0.135 | 3.7454 | 22600 | 0.3076 |
246
- | 0.1341 | 3.7537 | 22650 | 0.3067 |
247
- | 0.1396 | 3.7620 | 22700 | 0.3062 |
248
- | 0.1336 | 3.7703 | 22750 | 0.3059 |
249
- | 0.1295 | 3.7786 | 22800 | 0.3066 |
250
- | 0.1226 | 3.7869 | 22850 | 0.3068 |
251
- | 0.1299 | 3.7952 | 22900 | 0.3067 |
252
- | 0.1355 | 3.8034 | 22950 | 0.3068 |
253
- | 0.1197 | 3.8117 | 23000 | 0.3069 |
254
- | 0.1196 | 3.8200 | 23050 | 0.3075 |
255
- | 0.1413 | 3.8283 | 23100 | 0.3066 |
256
- | 0.1377 | 3.8366 | 23150 | 0.3064 |
257
- | 0.1082 | 3.8449 | 23200 | 0.3074 |
258
- | 0.129 | 3.8532 | 23250 | 0.3074 |
259
- | 0.1279 | 3.8615 | 23300 | 0.3073 |
260
- | 0.1296 | 3.8697 | 23350 | 0.3064 |
261
- | 0.121 | 3.8780 | 23400 | 0.3072 |
262
- | 0.1267 | 3.8863 | 23450 | 0.3069 |
263
- | 0.1224 | 3.8946 | 23500 | 0.3071 |
264
- | 0.1187 | 3.9029 | 23550 | 0.3073 |
265
- | 0.1264 | 3.9112 | 23600 | 0.3074 |
266
- | 0.1252 | 3.9195 | 23650 | 0.3073 |
267
- | 0.1279 | 3.9277 | 23700 | 0.3072 |
268
- | 0.1262 | 3.9360 | 23750 | 0.3070 |
269
- | 0.1255 | 3.9443 | 23800 | 0.3068 |
270
- | 0.1227 | 3.9526 | 23850 | 0.3066 |
271
- | 0.1285 | 3.9609 | 23900 | 0.3068 |
272
- | 0.1225 | 3.9692 | 23950 | 0.3068 |
273
- | 0.1302 | 3.9775 | 24000 | 0.3066 |
274
- | 0.1501 | 3.9857 | 24050 | 0.3065 |
275
- | 0.126 | 3.9940 | 24100 | 0.3065 |
276
 
277
 
278
  ### Framework versions
279
 
280
  - PEFT 0.12.0
281
  - Transformers 4.44.2
282
- - Pytorch 2.4.0+cu121
283
  - Datasets 3.0.0
284
  - Tokenizers 0.19.1
 
50
 
51
  | Training Loss | Epoch | Step | Validation Loss |
52
  |:-------------:|:------:|:-----:|:---------------:|
53
+ | 0.2053 | 2.3256 | 15000 | 0.2969 |
54
+ | 0.1942 | 2.3333 | 15050 | 0.2944 |
55
+ | 0.1871 | 2.3411 | 15100 | 0.2949 |
56
+ | 0.1845 | 2.3488 | 15150 | 0.2954 |
57
+ | 0.2108 | 2.3566 | 15200 | 0.2950 |
58
+ | 0.2065 | 2.3643 | 15250 | 0.2951 |
59
+ | 0.193 | 2.3721 | 15300 | 0.2961 |
60
+ | 0.1947 | 2.3798 | 15350 | 0.2953 |
61
+ | 0.1922 | 2.3876 | 15400 | 0.2949 |
62
+ | 0.1777 | 2.3953 | 15450 | 0.2949 |
63
+ | 0.212 | 2.4031 | 15500 | 0.2949 |
64
+ | 0.1962 | 2.4109 | 15550 | 0.2949 |
65
+ | 0.1789 | 2.4186 | 15600 | 0.2951 |
66
+ | 0.2183 | 2.4264 | 15650 | 0.2923 |
67
+ | 0.1962 | 2.4341 | 15700 | 0.2947 |
68
+ | 0.1907 | 2.4419 | 15750 | 0.2928 |
69
+ | 0.1936 | 2.4496 | 15800 | 0.2956 |
70
+ | 0.2086 | 2.4574 | 15850 | 0.2933 |
71
+ | 0.1895 | 2.4651 | 15900 | 0.2959 |
72
+ | 0.2157 | 2.4729 | 15950 | 0.2932 |
73
+ | 0.1897 | 2.4806 | 16000 | 0.2926 |
74
+ | 0.1862 | 2.4884 | 16050 | 0.2937 |
75
+ | 0.1899 | 2.4961 | 16100 | 0.2955 |
76
+ | 0.187 | 2.5039 | 16150 | 0.2970 |
77
+ | 0.2126 | 2.5116 | 16200 | 0.2941 |
78
+ | 0.1973 | 2.5194 | 16250 | 0.2933 |
79
+ | 0.1743 | 2.5271 | 16300 | 0.2930 |
80
+ | 0.1958 | 2.5349 | 16350 | 0.2938 |
81
+ | 0.2162 | 2.5426 | 16400 | 0.2919 |
82
+ | 0.1872 | 2.5504 | 16450 | 0.2936 |
83
+ | 0.1821 | 2.5581 | 16500 | 0.2940 |
84
+ | 0.2193 | 2.5659 | 16550 | 0.2940 |
85
+ | 0.1983 | 2.5736 | 16600 | 0.2943 |
86
+ | 0.2121 | 2.5814 | 16650 | 0.2941 |
87
+ | 0.1969 | 2.5891 | 16700 | 0.2923 |
88
+ | 0.1963 | 2.5969 | 16750 | 0.2921 |
89
+ | 0.2042 | 2.6047 | 16800 | 0.2938 |
90
+ | 0.1921 | 2.6124 | 16850 | 0.2914 |
91
+ | 0.2081 | 2.6202 | 16900 | 0.2917 |
92
+ | 0.1711 | 2.6279 | 16950 | 0.2923 |
93
+ | 0.1897 | 2.6357 | 17000 | 0.2918 |
94
+ | 0.1917 | 2.6434 | 17050 | 0.2933 |
95
+ | 0.1991 | 2.6512 | 17100 | 0.2909 |
96
+ | 0.2055 | 2.6589 | 17150 | 0.2930 |
97
+ | 0.1932 | 2.6667 | 17200 | 0.2907 |
98
+ | 0.2043 | 2.6744 | 17250 | 0.2937 |
99
+ | 0.1922 | 2.6822 | 17300 | 0.2922 |
100
+ | 0.1785 | 2.6899 | 17350 | 0.2922 |
101
+ | 0.2337 | 2.6977 | 17400 | 0.2908 |
102
+ | 0.1933 | 2.7054 | 17450 | 0.2922 |
103
+ | 0.2012 | 2.7132 | 17500 | 0.2914 |
104
+ | 0.1959 | 2.7209 | 17550 | 0.2910 |
105
+ | 0.1933 | 2.7287 | 17600 | 0.2882 |
106
+ | 0.1824 | 2.7364 | 17650 | 0.2889 |
107
+ | 0.2016 | 2.7442 | 17700 | 0.2898 |
108
+ | 0.2024 | 2.7519 | 17750 | 0.2915 |
109
+ | 0.2101 | 2.7597 | 17800 | 0.2888 |
110
+ | 0.1782 | 2.7674 | 17850 | 0.2908 |
111
+ | 0.2047 | 2.7752 | 17900 | 0.2902 |
112
+ | 0.195 | 2.7829 | 17950 | 0.2895 |
113
+ | 0.2122 | 2.7907 | 18000 | 0.2884 |
114
+ | 0.2099 | 2.7984 | 18050 | 0.2869 |
115
+ | 0.2054 | 2.8062 | 18100 | 0.2882 |
116
+ | 0.193 | 2.8140 | 18150 | 0.2884 |
117
+ | 0.187 | 2.8217 | 18200 | 0.2895 |
118
+ | 0.1997 | 2.8295 | 18250 | 0.2883 |
119
+ | 0.1885 | 2.8372 | 18300 | 0.2896 |
120
+ | 0.1957 | 2.8450 | 18350 | 0.2871 |
121
+ | 0.1905 | 2.8527 | 18400 | 0.2879 |
122
+ | 0.1933 | 2.8605 | 18450 | 0.2880 |
123
+ | 0.1953 | 2.8682 | 18500 | 0.2871 |
124
+ | 0.205 | 2.8760 | 18550 | 0.2865 |
125
+ | 0.191 | 2.8837 | 18600 | 0.2870 |
126
+ | 0.1903 | 2.8915 | 18650 | 0.2870 |
127
+ | 0.1897 | 2.8992 | 18700 | 0.2873 |
128
+ | 0.1966 | 2.9070 | 18750 | 0.2871 |
129
+ | 0.228 | 2.9147 | 18800 | 0.2875 |
130
+ | 0.1948 | 2.9225 | 18850 | 0.2870 |
131
+ | 0.1843 | 2.9302 | 18900 | 0.2859 |
132
+ | 0.2037 | 2.9380 | 18950 | 0.2872 |
133
+ | 0.2087 | 2.9457 | 19000 | 0.2855 |
134
+ | 0.1777 | 2.9535 | 19050 | 0.2864 |
135
+ | 0.1852 | 2.9612 | 19100 | 0.2866 |
136
+ | 0.1879 | 2.9690 | 19150 | 0.2858 |
137
+ | 0.2096 | 2.9767 | 19200 | 0.2848 |
138
+ | 0.1846 | 2.9845 | 19250 | 0.2857 |
139
+ | 0.1782 | 2.9922 | 19300 | 0.2859 |
140
+ | 0.1762 | 3.0 | 19350 | 0.2864 |
141
+ | 0.1339 | 3.0078 | 19400 | 0.3056 |
142
+ | 0.1356 | 3.0155 | 19450 | 0.3067 |
143
+ | 0.136 | 3.0233 | 19500 | 0.3073 |
144
+ | 0.1389 | 3.0310 | 19550 | 0.3089 |
145
+ | 0.1362 | 3.0388 | 19600 | 0.3084 |
146
+ | 0.1384 | 3.0465 | 19650 | 0.3085 |
147
+ | 0.1167 | 3.0543 | 19700 | 0.3092 |
148
+ | 0.1291 | 3.0620 | 19750 | 0.3078 |
149
+ | 0.1292 | 3.0698 | 19800 | 0.3092 |
150
+ | 0.1257 | 3.0775 | 19850 | 0.3099 |
151
+ | 0.1384 | 3.0853 | 19900 | 0.3088 |
152
+ | 0.1355 | 3.0930 | 19950 | 0.3076 |
153
+ | 0.1244 | 3.1008 | 20000 | 0.3088 |
154
+ | 0.141 | 3.1085 | 20050 | 0.3082 |
155
+ | 0.1398 | 3.1163 | 20100 | 0.3080 |
156
+ | 0.1415 | 3.1240 | 20150 | 0.3085 |
157
+ | 0.1521 | 3.1318 | 20200 | 0.3067 |
158
+ | 0.1266 | 3.1395 | 20250 | 0.3097 |
159
+ | 0.1254 | 3.1473 | 20300 | 0.3101 |
160
+ | 0.1403 | 3.1550 | 20350 | 0.3053 |
161
+ | 0.1395 | 3.1628 | 20400 | 0.3085 |
162
+ | 0.1328 | 3.1705 | 20450 | 0.3074 |
163
+ | 0.1381 | 3.1783 | 20500 | 0.3090 |
164
+ | 0.1323 | 3.1860 | 20550 | 0.3058 |
165
+ | 0.1299 | 3.1938 | 20600 | 0.3092 |
166
+ | 0.1432 | 3.2016 | 20650 | 0.3074 |
167
+ | 0.1399 | 3.2093 | 20700 | 0.3071 |
168
+ | 0.1288 | 3.2171 | 20750 | 0.3076 |
169
+ | 0.1464 | 3.2248 | 20800 | 0.3060 |
170
+ | 0.1347 | 3.2326 | 20850 | 0.3066 |
171
+ | 0.1336 | 3.2403 | 20900 | 0.3080 |
172
+ | 0.1245 | 3.2481 | 20950 | 0.3069 |
173
+ | 0.1305 | 3.2558 | 21000 | 0.3080 |
174
+ | 0.1379 | 3.2636 | 21050 | 0.3050 |
175
+ | 0.1269 | 3.2713 | 21100 | 0.3074 |
176
+ | 0.1379 | 3.2791 | 21150 | 0.3067 |
177
+ | 0.1348 | 3.2868 | 21200 | 0.3077 |
178
+ | 0.1261 | 3.2946 | 21250 | 0.3116 |
179
+ | 0.1354 | 3.3023 | 21300 | 0.3064 |
180
+ | 0.1323 | 3.3101 | 21350 | 0.3061 |
181
+ | 0.1255 | 3.3178 | 21400 | 0.3078 |
182
+ | 0.135 | 3.3256 | 21450 | 0.3073 |
183
+ | 0.1354 | 3.3333 | 21500 | 0.3070 |
184
+ | 0.1391 | 3.3411 | 21550 | 0.3066 |
185
+ | 0.1295 | 3.3488 | 21600 | 0.3086 |
186
+ | 0.1215 | 3.3566 | 21650 | 0.3085 |
187
+ | 0.1411 | 3.3643 | 21700 | 0.3072 |
188
+ | 0.1393 | 3.3721 | 21750 | 0.3090 |
189
+ | 0.132 | 3.3798 | 21800 | 0.3086 |
190
+ | 0.1199 | 3.3876 | 21850 | 0.3089 |
191
+ | 0.1349 | 3.3953 | 21900 | 0.3069 |
192
+ | 0.1325 | 3.4031 | 21950 | 0.3084 |
193
+ | 0.1247 | 3.4109 | 22000 | 0.3082 |
194
+ | 0.1178 | 3.4186 | 22050 | 0.3062 |
195
+ | 0.1218 | 3.4264 | 22100 | 0.3090 |
196
+ | 0.131 | 3.4341 | 22150 | 0.3100 |
197
+ | 0.1274 | 3.4419 | 22200 | 0.3070 |
198
+ | 0.136 | 3.4496 | 22250 | 0.3083 |
199
+ | 0.1458 | 3.4574 | 22300 | 0.3076 |
200
+ | 0.1365 | 3.4651 | 22350 | 0.3087 |
201
+ | 0.1362 | 3.4729 | 22400 | 0.3071 |
202
+ | 0.1318 | 3.4806 | 22450 | 0.3073 |
203
+ | 0.138 | 3.4884 | 22500 | 0.3067 |
204
+ | 0.1413 | 3.4961 | 22550 | 0.3080 |
205
+ | 0.1365 | 3.5039 | 22600 | 0.3087 |
206
+ | 0.1236 | 3.5116 | 22650 | 0.3078 |
207
+ | 0.1503 | 3.5194 | 22700 | 0.3063 |
208
+ | 0.1437 | 3.5271 | 22750 | 0.3070 |
209
+ | 0.1338 | 3.5349 | 22800 | 0.3070 |
210
+ | 0.1256 | 3.5426 | 22850 | 0.3080 |
211
+ | 0.1296 | 3.5504 | 22900 | 0.3074 |
212
+ | 0.1286 | 3.5581 | 22950 | 0.3061 |
213
+ | 0.1334 | 3.5659 | 23000 | 0.3075 |
214
+ | 0.133 | 3.5736 | 23050 | 0.3058 |
215
+ | 0.113 | 3.5814 | 23100 | 0.3060 |
216
+ | 0.1238 | 3.5891 | 23150 | 0.3052 |
217
+ | 0.1398 | 3.5969 | 23200 | 0.3044 |
218
+ | 0.142 | 3.6047 | 23250 | 0.3054 |
219
+ | 0.1257 | 3.6124 | 23300 | 0.3059 |
220
+ | 0.1324 | 3.6202 | 23350 | 0.3052 |
221
+ | 0.1376 | 3.6279 | 23400 | 0.3039 |
222
+ | 0.1343 | 3.6357 | 23450 | 0.3037 |
223
+ | 0.1264 | 3.6434 | 23500 | 0.3054 |
224
+ | 0.1263 | 3.6512 | 23550 | 0.3062 |
225
+ | 0.127 | 3.6589 | 23600 | 0.3054 |
226
+ | 0.1187 | 3.6667 | 23650 | 0.3054 |
227
+ | 0.1204 | 3.6744 | 23700 | 0.3059 |
228
+ | 0.1148 | 3.6822 | 23750 | 0.3065 |
229
+ | 0.1205 | 3.6899 | 23800 | 0.3073 |
230
+ | 0.1277 | 3.6977 | 23850 | 0.3067 |
231
+ | 0.1356 | 3.7054 | 23900 | 0.3067 |
232
+ | 0.1518 | 3.7132 | 23950 | 0.3064 |
233
+ | 0.1307 | 3.7209 | 24000 | 0.3062 |
234
+ | 0.1344 | 3.7287 | 24050 | 0.3061 |
235
+ | 0.1326 | 3.7364 | 24100 | 0.3065 |
236
+ | 0.1246 | 3.7442 | 24150 | 0.3074 |
237
+ | 0.1319 | 3.7519 | 24200 | 0.3071 |
238
+ | 0.1436 | 3.7597 | 24250 | 0.3063 |
239
+ | 0.1389 | 3.7674 | 24300 | 0.3064 |
240
+ | 0.1275 | 3.7752 | 24350 | 0.3065 |
241
+ | 0.1353 | 3.7829 | 24400 | 0.3061 |
242
+ | 0.1289 | 3.7907 | 24450 | 0.3056 |
243
+ | 0.1326 | 3.7984 | 24500 | 0.3053 |
244
+ | 0.1244 | 3.8062 | 24550 | 0.3054 |
245
+ | 0.1287 | 3.8140 | 24600 | 0.3056 |
246
+ | 0.1168 | 3.8217 | 24650 | 0.3058 |
247
+ | 0.1298 | 3.8295 | 24700 | 0.3055 |
248
+ | 0.1231 | 3.8372 | 24750 | 0.3057 |
249
+ | 0.1289 | 3.8450 | 24800 | 0.3059 |
250
+ | 0.1184 | 3.8527 | 24850 | 0.3056 |
251
+ | 0.1226 | 3.8605 | 24900 | 0.3055 |
252
+ | 0.1593 | 3.8682 | 24950 | 0.3057 |
253
+ | 0.128 | 3.8760 | 25000 | 0.3064 |
254
+ | 0.1332 | 3.8837 | 25050 | 0.3058 |
255
+ | 0.1397 | 3.8915 | 25100 | 0.3055 |
256
+ | 0.1059 | 3.8992 | 25150 | 0.3058 |
257
+ | 0.1281 | 3.9070 | 25200 | 0.3054 |
258
+ | 0.1277 | 3.9147 | 25250 | 0.3056 |
259
+ | 0.1119 | 3.9225 | 25300 | 0.3059 |
260
+ | 0.1212 | 3.9302 | 25350 | 0.3059 |
261
+ | 0.1131 | 3.9380 | 25400 | 0.3059 |
262
+ | 0.1407 | 3.9457 | 25450 | 0.3059 |
263
+ | 0.1286 | 3.9535 | 25500 | 0.3056 |
264
+ | 0.1252 | 3.9612 | 25550 | 0.3056 |
265
+ | 0.138 | 3.9690 | 25600 | 0.3056 |
266
+ | 0.1245 | 3.9767 | 25650 | 0.3056 |
267
+ | 0.1213 | 3.9845 | 25700 | 0.3056 |
268
+ | 0.1276 | 3.9922 | 25750 | 0.3056 |
269
+ | 0.1328 | 4.0 | 25800 | 0.3056 |
270
 
271
 
272
  ### Framework versions
273
 
274
  - PEFT 0.12.0
275
  - Transformers 4.44.2
276
+ - Pytorch 2.4.1+cu121
277
  - Datasets 3.0.0
278
  - Tokenizers 0.19.1
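
For reference, a minimal usage sketch (not part of this commit) showing how an adapter trained this way could be loaded on top of the base model with PEFT; the adapter repo id below is a placeholder, not taken from the diff:

```python
# Sketch only: load codellama/CodeLlama-7b-Instruct-hf and apply a LoRA adapter via PEFT.
# "SimonMA/<adapter-repo>" is a placeholder; substitute the actual adapter repository id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "codellama/CodeLlama-7b-Instruct-hf"
adapter_id = "SimonMA/<adapter-repo>"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype="auto", device_map="auto")
model = PeftModel.from_pretrained(model, adapter_id)

prompt = "[INST] Write a Python function that reverses a string. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```
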
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "v_proj",
- "o_proj",
- "gate_proj",
  "up_proj",
- "q_proj",
  "k_proj",
- "down_proj"
+ "gate_proj",
+ "o_proj",
+ "down_proj",
+ "v_proj",
+ "q_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:098db413d6c11af26ea80e549bc6b0b97b383aed1a24119a350c09aed1a3cb24
+ oid sha256:4a700f3ab144429aa5bccb81a0341e97f8a4ac23bd05bba88d4137e7f1c0d0e1
  size 2332095256
runs/Sep24_12-56-17_5e17c42266da/events.out.tfevents.1727182583.5e17c42266da.9001.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b11bed4ceff182facb5e292a205e9d41c6f8dc60390619b3f0158b1de467a87
+ size 175606
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bae5bf8f8e1e83312e9d8d7d31020fadf6e7b078a3850ce818fe97ebd73ef77d
+ oid sha256:0aebd9b1ef838cb16fa3ad38837049e149f15db8e0f3f3b037395dfab7a4203c
  size 5496