End of training
- README.md +180 -208
- adapter_config.json +3 -3
- adapter_model.safetensors +1 -1
- runs/Jul09_05-57-56_2e02f1ec7e57/events.out.tfevents.1720504688.2e02f1ec7e57.984.0 +3 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
+- Loss: 0.2966
 
 ## Model description
 
@@ -50,218 +50,190 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:-----:|:---------------:|
-| 0.… | … | … | … | (178 removed rows, old lines 53-230; their values are truncated in this capture)
-| 0.1172 | 3.7543 | 20900 | 0.3030 |
-| 0.1251 | 3.7632 | 20950 | 0.3035 |
-| 0.1132 | 3.7722 | 21000 | 0.3025 |
-| 0.1301 | 3.7812 | 21050 | 0.3015 |
-| 0.1262 | 3.7902 | 21100 | 0.3011 |
-| 0.1287 | 3.7992 | 21150 | 0.3014 |
-| 0.1283 | 3.8082 | 21200 | 0.3017 |
-| 0.1296 | 3.8171 | 21250 | 0.3021 |
-| 0.1137 | 3.8261 | 21300 | 0.3025 |
-| 0.1279 | 3.8351 | 21350 | 0.3029 |
-| 0.114 | 3.8441 | 21400 | 0.3023 |
-| 0.1213 | 3.8531 | 21450 | 0.3019 |
-| 0.1174 | 3.8620 | 21500 | 0.3016 |
-| 0.1156 | 3.8710 | 21550 | 0.3019 |
-| 0.1194 | 3.8800 | 21600 | 0.3017 |
-| 0.1136 | 3.8890 | 21650 | 0.3018 |
-| 0.124 | 3.8980 | 21700 | 0.3012 |
-| 0.1204 | 3.9070 | 21750 | 0.3013 |
-| 0.1348 | 3.9159 | 21800 | 0.3015 |
-| 0.1237 | 3.9249 | 21850 | 0.3019 |
-| 0.1213 | 3.9339 | 21900 | 0.3020 |
-| 0.114 | 3.9429 | 21950 | 0.3020 |
-| 0.1136 | 3.9519 | 22000 | 0.3019 |
-| 0.1195 | 3.9608 | 22050 | 0.3017 |
-| 0.1307 | 3.9698 | 22100 | 0.3017 |
-| 0.1355 | 3.9788 | 22150 | 0.3015 |
-| 0.1165 | 3.9878 | 22200 | 0.3016 |
-| 0.1296 | 3.9968 | 22250 | 0.3016 |
+| 0.1821 | 2.4463 | 14000 | 0.2874 |
+| 0.194 | 2.4550 | 14050 | 0.2881 |
+| 0.174 | 2.4637 | 14100 | 0.2896 |
+| 0.1964 | 2.4725 | 14150 | 0.2865 |
+| 0.1885 | 2.4812 | 14200 | 0.2862 |
+| 0.1929 | 2.4900 | 14250 | 0.2889 |
+| 0.1873 | 2.4987 | 14300 | 0.2877 |
+| 0.2055 | 2.5074 | 14350 | 0.2868 |
+| 0.1769 | 2.5162 | 14400 | 0.2875 |
+| 0.1796 | 2.5249 | 14450 | 0.2870 |
+| 0.1863 | 2.5336 | 14500 | 0.2860 |
+| 0.1973 | 2.5424 | 14550 | 0.2851 |
+| 0.1799 | 2.5511 | 14600 | 0.2854 |
+| 0.1911 | 2.5598 | 14650 | 0.2876 |
+| 0.1893 | 2.5686 | 14700 | 0.2850 |
+| 0.1919 | 2.5773 | 14750 | 0.2859 |
+| 0.1842 | 2.5861 | 14800 | 0.2854 |
+| 0.1852 | 2.5948 | 14850 | 0.2843 |
+| 0.1992 | 2.6035 | 14900 | 0.2831 |
+| 0.1728 | 2.6123 | 14950 | 0.2857 |
+| 0.1798 | 2.6210 | 15000 | 0.2842 |
+| 0.2039 | 2.6297 | 15050 | 0.2836 |
+| 0.1929 | 2.6385 | 15100 | 0.2847 |
+| 0.2046 | 2.6472 | 15150 | 0.2840 |
+| 0.1805 | 2.6559 | 15200 | 0.2848 |
+| 0.1839 | 2.6647 | 15250 | 0.2857 |
+| 0.1908 | 2.6734 | 15300 | 0.2826 |
+| 0.2015 | 2.6822 | 15350 | 0.2825 |
+| 0.1706 | 2.6909 | 15400 | 0.2825 |
+| 0.1788 | 2.6996 | 15450 | 0.2823 |
+| 0.1961 | 2.7084 | 15500 | 0.2828 |
+| 0.1979 | 2.7171 | 15550 | 0.2814 |
+| 0.1852 | 2.7258 | 15600 | 0.2839 |
+| 0.1828 | 2.7346 | 15650 | 0.2845 |
+| 0.1919 | 2.7433 | 15700 | 0.2828 |
+| 0.2192 | 2.7521 | 15750 | 0.2809 |
+| 0.1792 | 2.7608 | 15800 | 0.2810 |
+| 0.1877 | 2.7695 | 15850 | 0.2814 |
+| 0.1888 | 2.7783 | 15900 | 0.2818 |
+| 0.1833 | 2.7870 | 15950 | 0.2837 |
+| 0.1975 | 2.7957 | 16000 | 0.2819 |
+| 0.1773 | 2.8045 | 16050 | 0.2825 |
+| 0.2118 | 2.8132 | 16100 | 0.2819 |
+| 0.1926 | 2.8219 | 16150 | 0.2836 |
+| 0.1946 | 2.8307 | 16200 | 0.2810 |
+| 0.1905 | 2.8394 | 16250 | 0.2815 |
+| 0.1924 | 2.8482 | 16300 | 0.2814 |
+| 0.1775 | 2.8569 | 16350 | 0.2816 |
+| 0.1957 | 2.8656 | 16400 | 0.2827 |
+| 0.2197 | 2.8744 | 16450 | 0.2794 |
+| 0.1894 | 2.8831 | 16500 | 0.2805 |
+| 0.1957 | 2.8918 | 16550 | 0.2797 |
+| 0.1995 | 2.9006 | 16600 | 0.2804 |
+| 0.2178 | 2.9093 | 16650 | 0.2803 |
+| 0.1941 | 2.9180 | 16700 | 0.2797 |
+| 0.1752 | 2.9268 | 16750 | 0.2819 |
+| 0.1646 | 2.9355 | 16800 | 0.2816 |
+| 0.1873 | 2.9443 | 16850 | 0.2799 |
+| 0.1964 | 2.9530 | 16900 | 0.2800 |
+| 0.1828 | 2.9617 | 16950 | 0.2798 |
+| 0.1835 | 2.9705 | 17000 | 0.2798 |
+| 0.1946 | 2.9792 | 17050 | 0.2799 |
+| 0.1925 | 2.9879 | 17100 | 0.2784 |
+| 0.1731 | 2.9967 | 17150 | 0.2792 |
+| 0.1531 | 3.0054 | 17200 | 0.2895 |
+| 0.1187 | 3.0142 | 17250 | 0.2989 |
+| 0.127 | 3.0229 | 17300 | 0.3017 |
+| 0.1143 | 3.0316 | 17350 | 0.3015 |
+| 0.1457 | 3.0404 | 17400 | 0.3004 |
+| 0.1208 | 3.0491 | 17450 | 0.3014 |
+| 0.1286 | 3.0578 | 17500 | 0.3010 |
+| 0.1481 | 3.0666 | 17550 | 0.3007 |
+| 0.1351 | 3.0753 | 17600 | 0.2996 |
+| 0.1232 | 3.0840 | 17650 | 0.2998 |
+| 0.1329 | 3.0928 | 17700 | 0.3002 |
+| 0.1289 | 3.1015 | 17750 | 0.3037 |
+| 0.1188 | 3.1103 | 17800 | 0.3022 |
+| 0.1392 | 3.1190 | 17850 | 0.3011 |
+| 0.1344 | 3.1277 | 17900 | 0.3011 |
+| 0.1144 | 3.1365 | 17950 | 0.3013 |
+| 0.1238 | 3.1452 | 18000 | 0.3000 |
+| 0.1273 | 3.1539 | 18050 | 0.3016 |
+| 0.1218 | 3.1627 | 18100 | 0.3014 |
+| 0.1331 | 3.1714 | 18150 | 0.3015 |
+| 0.1297 | 3.1802 | 18200 | 0.3001 |
+| 0.1143 | 3.1889 | 18250 | 0.3005 |
+| 0.1239 | 3.1976 | 18300 | 0.2992 |
+| 0.1363 | 3.2064 | 18350 | 0.3002 |
+| 0.1308 | 3.2151 | 18400 | 0.2988 |
+| 0.1362 | 3.2238 | 18450 | 0.3004 |
+| 0.1242 | 3.2326 | 18500 | 0.2997 |
+| 0.1316 | 3.2413 | 18550 | 0.3010 |
+| 0.1179 | 3.2500 | 18600 | 0.3029 |
+| 0.1366 | 3.2588 | 18650 | 0.3031 |
+| 0.1392 | 3.2675 | 18700 | 0.2982 |
+| 0.1294 | 3.2763 | 18750 | 0.2981 |
+| 0.1369 | 3.2850 | 18800 | 0.2979 |
+| 0.1271 | 3.2937 | 18850 | 0.3006 |
+| 0.1336 | 3.3025 | 18900 | 0.2996 |
+| 0.1324 | 3.3112 | 18950 | 0.3014 |
+| 0.1195 | 3.3199 | 19000 | 0.2994 |
+| 0.1447 | 3.3287 | 19050 | 0.2964 |
+| 0.1376 | 3.3374 | 19100 | 0.2978 |
+| 0.124 | 3.3461 | 19150 | 0.2986 |
+| 0.1312 | 3.3549 | 19200 | 0.2994 |
+| 0.1291 | 3.3636 | 19250 | 0.2993 |
+| 0.133 | 3.3724 | 19300 | 0.2972 |
+| 0.1265 | 3.3811 | 19350 | 0.2972 |
+| 0.1298 | 3.3898 | 19400 | 0.2987 |
+| 0.1345 | 3.3986 | 19450 | 0.2981 |
+| 0.1374 | 3.4073 | 19500 | 0.2973 |
+| 0.1232 | 3.4160 | 19550 | 0.2965 |
+| 0.1217 | 3.4248 | 19600 | 0.2968 |
+| 0.1156 | 3.4335 | 19650 | 0.2987 |
+| 0.1126 | 3.4423 | 19700 | 0.2981 |
+| 0.1301 | 3.4510 | 19750 | 0.2992 |
+| 0.1216 | 3.4597 | 19800 | 0.2990 |
+| 0.1246 | 3.4685 | 19850 | 0.2984 |
+| 0.14 | 3.4772 | 19900 | 0.2960 |
+| 0.1347 | 3.4859 | 19950 | 0.2974 |
+| 0.1282 | 3.4947 | 20000 | 0.2986 |
+| 0.1182 | 3.5034 | 20050 | 0.2987 |
+| 0.1185 | 3.5121 | 20100 | 0.2987 |
+| 0.1406 | 3.5209 | 20150 | 0.2966 |
+| 0.1336 | 3.5296 | 20200 | 0.2968 |
+| 0.1354 | 3.5384 | 20250 | 0.2975 |
+| 0.1283 | 3.5471 | 20300 | 0.2976 |
+| 0.1295 | 3.5558 | 20350 | 0.2988 |
+| 0.1226 | 3.5646 | 20400 | 0.2985 |
+| 0.1143 | 3.5733 | 20450 | 0.2971 |
+| 0.1302 | 3.5820 | 20500 | 0.2968 |
+| 0.1353 | 3.5908 | 20550 | 0.2986 |
+| 0.1378 | 3.5995 | 20600 | 0.2991 |
+| 0.1341 | 3.6082 | 20650 | 0.2978 |
+| 0.1269 | 3.6170 | 20700 | 0.2972 |
+| 0.1236 | 3.6257 | 20750 | 0.2969 |
+| 0.1253 | 3.6345 | 20800 | 0.2988 |
+| 0.1147 | 3.6432 | 20850 | 0.2985 |
+| 0.1266 | 3.6519 | 20900 | 0.2986 |
+| 0.1259 | 3.6607 | 20950 | 0.2982 |
+| 0.1158 | 3.6694 | 21000 | 0.2989 |
+| 0.1222 | 3.6781 | 21050 | 0.2980 |
+| 0.1295 | 3.6869 | 21100 | 0.2971 |
+| 0.1263 | 3.6956 | 21150 | 0.2972 |
+| 0.1254 | 3.7044 | 21200 | 0.2965 |
+| 0.1204 | 3.7131 | 21250 | 0.2964 |
+| 0.1283 | 3.7218 | 21300 | 0.2955 |
+| 0.1256 | 3.7306 | 21350 | 0.2959 |
+| 0.1375 | 3.7393 | 21400 | 0.2963 |
+| 0.1206 | 3.7480 | 21450 | 0.2965 |
+| 0.1309 | 3.7568 | 21500 | 0.2973 |
+| 0.1173 | 3.7655 | 21550 | 0.2994 |
+| 0.117 | 3.7742 | 21600 | 0.2989 |
+| 0.1249 | 3.7830 | 21650 | 0.2995 |
+| 0.129 | 3.7917 | 21700 | 0.2979 |
+| 0.1149 | 3.8005 | 21750 | 0.2981 |
+| 0.1262 | 3.8092 | 21800 | 0.2982 |
+| 0.1113 | 3.8179 | 21850 | 0.2984 |
+| 0.1135 | 3.8267 | 21900 | 0.2980 |
+| 0.1187 | 3.8354 | 21950 | 0.2972 |
+| 0.1161 | 3.8441 | 22000 | 0.2974 |
+| 0.131 | 3.8529 | 22050 | 0.2974 |
+| 0.1232 | 3.8616 | 22100 | 0.2980 |
+| 0.1188 | 3.8703 | 22150 | 0.2978 |
+| 0.1335 | 3.8791 | 22200 | 0.2977 |
+| 0.1375 | 3.8878 | 22250 | 0.2966 |
+| 0.1183 | 3.8966 | 22300 | 0.2969 |
+| 0.1139 | 3.9053 | 22350 | 0.2969 |
+| 0.1314 | 3.9140 | 22400 | 0.2970 |
+| 0.139 | 3.9228 | 22450 | 0.2969 |
+| 0.1296 | 3.9315 | 22500 | 0.2973 |
+| 0.1286 | 3.9402 | 22550 | 0.2969 |
+| 0.1248 | 3.9490 | 22600 | 0.2967 |
+| 0.1115 | 3.9577 | 22650 | 0.2968 |
+| 0.1195 | 3.9665 | 22700 | 0.2968 |
+| 0.1163 | 3.9752 | 22750 | 0.2967 |
+| 0.1305 | 3.9839 | 22800 | 0.2966 |
+| 0.119 | 3.9927 | 22850 | 0.2966 |
 
 
 ### Framework versions
 
 - PEFT 0.11.1
-- Transformers 4.42.
+- Transformers 4.42.3
 - Pytorch 2.3.0+cu121
 - Datasets 2.20.0
 - Tokenizers 0.19.1
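The README above documents a LoRA adapter trained with PEFT on top of CodeLlama-7b-Instruct. As a hedged reference, a minimal loading sketch: the adapter repo id below is a placeholder, since the card does not name it, and the version pins come from the Framework versions list.

```python
# Minimal sketch, assuming peft 0.11.1 / transformers 4.42.3 as in the card.
# "your-username/your-adapter" is a placeholder repo id, not taken from the card.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "codellama/CodeLlama-7b-Instruct-hf"
adapter_id = "your-username/your-adapter"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
model = PeftModel.from_pretrained(base, adapter_id)  # attaches the LoRA weights

prompt = "[INST] Write a function that reverses a string. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```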
adapter_config.json
CHANGED
@@ -21,12 +21,12 @@
   "revision": null,
   "target_modules": [
     "up_proj",
-    "o_proj",
     "k_proj",
+    "o_proj",
     "down_proj",
     "v_proj",
-    "
-    "
+    "gate_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
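After this change, `target_modules` covers all seven attention and MLP projections of the Llama block. For orientation, a sketch of a `LoraConfig` that would produce this list; `r`, `lora_alpha`, and `lora_dropout` are not visible in the diff, so the values below are illustrative placeholders only.

```python
# Sketch only: rank/alpha/dropout are assumptions, not values from this repo.
from peft import LoraConfig

config = LoraConfig(
    r=16,               # placeholder rank
    lora_alpha=32,      # placeholder scaling factor
    lora_dropout=0.05,  # placeholder dropout
    target_modules=[
        "up_proj", "k_proj", "o_proj", "down_proj",
        "v_proj", "gate_proj", "q_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)
```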
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:be0325a9c300a3c5c95ec26d42825b2d712968e48f141bedc1cb1870c4da6c35
 size 2332095256
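The entry above is a Git LFS pointer, not the weights themselves: `oid` is the SHA-256 digest of the real file and `size` its length in bytes. A small sketch for verifying a downloaded copy against the pointer:

```python
# Verify a downloaded adapter_model.safetensors against its LFS pointer.
import hashlib
import os

path = "adapter_model.safetensors"
expected_oid = "be0325a9c300a3c5c95ec26d42825b2d712968e48f141bedc1cb1870c4da6c35"
expected_size = 2332095256

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer matches file")
```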
runs/Jul09_05-57-56_2e02f1ec7e57/events.out.tfevents.1720504688.2e02f1ec7e57.984.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e0d6345abd2cb598de8de2f4e04f755be171561171bc233cc5192c0ea3355a6
+size 151936
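The added `events.out.tfevents` file is the TensorBoard log for this run (stored as an LFS pointer here as well). A sketch for reading the evaluation-loss curve back out of a materialized copy; the `eval/loss` tag follows the usual `transformers.Trainer` convention but is an assumption, not something the diff confirms.

```python
# Sketch, assuming the tfevents file has been downloaded into runs/... and
# that eval loss was logged under the Trainer's conventional "eval/loss" tag.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jul09_05-57-56_2e02f1ec7e57")
acc.Reload()  # parse the event file(s) in the directory
for event in acc.Scalars("eval/loss"):
    print(event.step, event.value)
```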
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bd7c01af48d5b09918b87d696a87bb1b76b50c5ffeb3cd440761c027325f0aea
 size 5432
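`training_args.bin` is the pickled `TrainingArguments` object that the `Trainer` saves alongside a run. With a compatible `transformers` install (the card pins 4.42.3) it can be reloaded for inspection; a minimal sketch:

```python
# Sketch: unpickle the saved TrainingArguments; torch.load uses pickle here,
# so a transformers version close to 4.42.3 must be importable.
import torch

args = torch.load("training_args.bin")
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```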