End of training

Browse files

README.md CHANGED
@@ -1,11 +1,11 @@
 ---
-license: llama2
+base_model: codellama/CodeLlama-7b-Instruct-hf
 library_name: peft
+license: llama2
 tags:
 - trl
 - sft
 - generated_from_trainer
-base_model: codellama/CodeLlama-7b-Instruct-hf
 model-index:
 - name: Codellama-7b-lora-rps-adapter
   results: []
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
+- Loss: 0.3016
 
 ## Model description
 
@@ -50,244 +50,218 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:-----:|:---------------:|
[206 removed rows truncated in the source; the last 26 removed rows follow]
-| 0.1228 | 3.7655 | 20300 | 0.3070 |
-| 0.1199 | 3.7748 | 20350 | 0.3082 |
-| 0.1307 | 3.7841 | 20400 | 0.3079 |
-| 0.1234 | 3.7934 | 20450 | 0.3071 |
-| 0.1346 | 3.8026 | 20500 | 0.3074 |
-| 0.1115 | 3.8119 | 20550 | 0.3074 |
-| 0.1183 | 3.8212 | 20600 | 0.3067 |
-| 0.1185 | 3.8305 | 20650 | 0.3066 |
-| 0.1203 | 3.8397 | 20700 | 0.3074 |
-| 0.1262 | 3.8490 | 20750 | 0.3078 |
-| 0.1232 | 3.8583 | 20800 | 0.3077 |
-| 0.1196 | 3.8676 | 20850 | 0.3075 |
-| 0.1217 | 3.8768 | 20900 | 0.3071 |
-| 0.1392 | 3.8861 | 20950 | 0.3068 |
-| 0.117 | 3.8954 | 21000 | 0.3065 |
-| 0.1396 | 3.9047 | 21050 | 0.3058 |
-| 0.1149 | 3.9139 | 21100 | 0.3061 |
-| 0.1235 | 3.9232 | 21150 | 0.3062 |
-| 0.125 | 3.9325 | 21200 | 0.3064 |
-| 0.1272 | 3.9418 | 21250 | 0.3060 |
-| 0.1189 | 3.9510 | 21300 | 0.3062 |
-| 0.1132 | 3.9603 | 21350 | 0.3064 |
-| 0.1221 | 3.9696 | 21400 | 0.3063 |
-| 0.1283 | 3.9789 | 21450 | 0.3064 |
-| 0.135 | 3.9881 | 21500 | 0.3063 |
-| 0.1207 | 3.9974 | 21550 | 0.3064 |
+| 0.1765 | 2.1556 | 12000 | 0.2957 |
+| 0.1764 | 2.1645 | 12050 | 0.2960 |
+| 0.1917 | 2.1735 | 12100 | 0.2960 |
+| 0.1775 | 2.1825 | 12150 | 0.2940 |
+| 0.2042 | 2.1915 | 12200 | 0.2944 |
+| 0.1866 | 2.2005 | 12250 | 0.2959 |
+| 0.1856 | 2.2094 | 12300 | 0.2961 |
+| 0.1923 | 2.2184 | 12350 | 0.2951 |
+| 0.1797 | 2.2274 | 12400 | 0.2968 |
+| 0.177 | 2.2364 | 12450 | 0.2945 |
+| 0.1815 | 2.2454 | 12500 | 0.2970 |
+| 0.1947 | 2.2544 | 12550 | 0.2935 |
+| 0.1985 | 2.2633 | 12600 | 0.2937 |
+| 0.1828 | 2.2723 | 12650 | 0.2945 |
+| 0.1865 | 2.2813 | 12700 | 0.2939 |
+| 0.1855 | 2.2903 | 12750 | 0.2939 |
+| 0.2069 | 2.2993 | 12800 | 0.2946 |
+| 0.1933 | 2.3082 | 12850 | 0.2939 |
+| 0.1953 | 2.3172 | 12900 | 0.2954 |
+| 0.2116 | 2.3262 | 12950 | 0.2931 |
+| 0.1992 | 2.3352 | 13000 | 0.2934 |
+| 0.1854 | 2.3442 | 13050 | 0.2944 |
+| 0.1839 | 2.3532 | 13100 | 0.2933 |
+| 0.1902 | 2.3621 | 13150 | 0.2937 |
+| 0.1934 | 2.3711 | 13200 | 0.2926 |
+| 0.1868 | 2.3801 | 13250 | 0.2928 |
+| 0.2005 | 2.3891 | 13300 | 0.2915 |
+| 0.1946 | 2.3981 | 13350 | 0.2936 |
+| 0.1772 | 2.4070 | 13400 | 0.2939 |
+| 0.2018 | 2.4160 | 13450 | 0.2922 |
+| 0.1889 | 2.4250 | 13500 | 0.2922 |
+| 0.1951 | 2.4340 | 13550 | 0.2920 |
+| 0.2053 | 2.4430 | 13600 | 0.2905 |
+| 0.181 | 2.4519 | 13650 | 0.2910 |
+| 0.2096 | 2.4609 | 13700 | 0.2897 |
+| 0.197 | 2.4699 | 13750 | 0.2915 |
+| 0.2021 | 2.4789 | 13800 | 0.2896 |
+| 0.1829 | 2.4879 | 13850 | 0.2899 |
+| 0.1843 | 2.4969 | 13900 | 0.2896 |
+| 0.1675 | 2.5058 | 13950 | 0.2913 |
+| 0.1825 | 2.5148 | 14000 | 0.2906 |
+| 0.198 | 2.5238 | 14050 | 0.2908 |
+| 0.1997 | 2.5328 | 14100 | 0.2900 |
+| 0.1913 | 2.5418 | 14150 | 0.2892 |
+| 0.1888 | 2.5507 | 14200 | 0.2906 |
+| 0.1969 | 2.5597 | 14250 | 0.2887 |
+| 0.19 | 2.5687 | 14300 | 0.2887 |
+| 0.1918 | 2.5777 | 14350 | 0.2895 |
+| 0.1818 | 2.5867 | 14400 | 0.2891 |
+| 0.1932 | 2.5957 | 14450 | 0.2883 |
+| 0.2034 | 2.6046 | 14500 | 0.2869 |
+| 0.1919 | 2.6136 | 14550 | 0.2881 |
+| 0.1849 | 2.6226 | 14600 | 0.2887 |
+| 0.185 | 2.6316 | 14650 | 0.2880 |
+| 0.1702 | 2.6406 | 14700 | 0.2880 |
+| 0.1861 | 2.6495 | 14750 | 0.2874 |
+| 0.1975 | 2.6585 | 14800 | 0.2873 |
+| 0.1651 | 2.6675 | 14850 | 0.2867 |
+| 0.1855 | 2.6765 | 14900 | 0.2866 |
+| 0.172 | 2.6855 | 14950 | 0.2886 |
+| 0.1954 | 2.6944 | 15000 | 0.2868 |
+| 0.2054 | 2.7034 | 15050 | 0.2852 |
+| 0.1682 | 2.7124 | 15100 | 0.2864 |
+| 0.1935 | 2.7214 | 15150 | 0.2874 |
+| 0.1846 | 2.7304 | 15200 | 0.2871 |
+| 0.174 | 2.7394 | 15250 | 0.2857 |
+| 0.1885 | 2.7483 | 15300 | 0.2879 |
+| 0.1906 | 2.7573 | 15350 | 0.2864 |
+| 0.1714 | 2.7663 | 15400 | 0.2864 |
+| 0.173 | 2.7753 | 15450 | 0.2873 |
+| 0.1876 | 2.7843 | 15500 | 0.2861 |
+| 0.1635 | 2.7932 | 15550 | 0.2858 |
+| 0.1855 | 2.8022 | 15600 | 0.2876 |
+| 0.1864 | 2.8112 | 15650 | 0.2873 |
+| 0.1825 | 2.8202 | 15700 | 0.2855 |
+| 0.188 | 2.8292 | 15750 | 0.2868 |
+| 0.192 | 2.8382 | 15800 | 0.2851 |
+| 0.2082 | 2.8471 | 15850 | 0.2847 |
+| 0.1864 | 2.8561 | 15900 | 0.2860 |
+| 0.1677 | 2.8651 | 15950 | 0.2853 |
+| 0.1829 | 2.8741 | 16000 | 0.2849 |
+| 0.1729 | 2.8831 | 16050 | 0.2852 |
+| 0.1948 | 2.8920 | 16100 | 0.2829 |
+| 0.1709 | 2.9010 | 16150 | 0.2845 |
+| 0.1869 | 2.9100 | 16200 | 0.2856 |
+| 0.1938 | 2.9190 | 16250 | 0.2853 |
+| 0.1892 | 2.9280 | 16300 | 0.2844 |
+| 0.1875 | 2.9369 | 16350 | 0.2865 |
+| 0.1754 | 2.9459 | 16400 | 0.2847 |
+| 0.1697 | 2.9549 | 16450 | 0.2856 |
+| 0.1803 | 2.9639 | 16500 | 0.2859 |
+| 0.1747 | 2.9729 | 16550 | 0.2844 |
+| 0.2116 | 2.9819 | 16600 | 0.2841 |
+| 0.1825 | 2.9908 | 16650 | 0.2833 |
+| 0.1775 | 2.9998 | 16700 | 0.2860 |
+| 0.1425 | 3.0088 | 16750 | 0.3030 |
+| 0.124 | 3.0178 | 16800 | 0.3037 |
+| 0.1364 | 3.0268 | 16850 | 0.3080 |
+| 0.1234 | 3.0357 | 16900 | 0.3050 |
+| 0.1264 | 3.0447 | 16950 | 0.3073 |
+| 0.1289 | 3.0537 | 17000 | 0.3057 |
+| 0.1391 | 3.0627 | 17050 | 0.3055 |
+| 0.1258 | 3.0717 | 17100 | 0.3047 |
+| 0.1429 | 3.0807 | 17150 | 0.3076 |
+| 0.1213 | 3.0896 | 17200 | 0.3058 |
+| 0.1273 | 3.0986 | 17250 | 0.3064 |
+| 0.1259 | 3.1076 | 17300 | 0.3061 |
+| 0.1418 | 3.1166 | 17350 | 0.3033 |
+| 0.1238 | 3.1256 | 17400 | 0.3065 |
+| 0.1231 | 3.1345 | 17450 | 0.3078 |
+| 0.1289 | 3.1435 | 17500 | 0.3074 |
+| 0.1242 | 3.1525 | 17550 | 0.3043 |
+| 0.1203 | 3.1615 | 17600 | 0.3044 |
+| 0.1298 | 3.1705 | 17650 | 0.3058 |
+| 0.1278 | 3.1795 | 17700 | 0.3051 |
+| 0.1283 | 3.1884 | 17750 | 0.3056 |
+| 0.1325 | 3.1974 | 17800 | 0.3058 |
+| 0.1178 | 3.2064 | 17850 | 0.3064 |
+| 0.1196 | 3.2154 | 17900 | 0.3049 |
+| 0.1331 | 3.2244 | 17950 | 0.3048 |
+| 0.1209 | 3.2333 | 18000 | 0.3068 |
+| 0.1166 | 3.2423 | 18050 | 0.3059 |
+| 0.1196 | 3.2513 | 18100 | 0.3057 |
+| 0.1274 | 3.2603 | 18150 | 0.3051 |
+| 0.1336 | 3.2693 | 18200 | 0.3044 |
+| 0.1495 | 3.2782 | 18250 | 0.3026 |
+| 0.1169 | 3.2872 | 18300 | 0.3047 |
+| 0.1258 | 3.2962 | 18350 | 0.3042 |
+| 0.1267 | 3.3052 | 18400 | 0.3045 |
+| 0.1252 | 3.3142 | 18450 | 0.3040 |
+| 0.1342 | 3.3232 | 18500 | 0.3031 |
+| 0.1285 | 3.3321 | 18550 | 0.3041 |
+| 0.1281 | 3.3411 | 18600 | 0.3027 |
+| 0.1181 | 3.3501 | 18650 | 0.3021 |
+| 0.132 | 3.3591 | 18700 | 0.3035 |
+| 0.1328 | 3.3681 | 18750 | 0.3055 |
+| 0.1233 | 3.3770 | 18800 | 0.3033 |
+| 0.1241 | 3.3860 | 18850 | 0.3046 |
+| 0.1139 | 3.3950 | 18900 | 0.3042 |
+| 0.1471 | 3.4040 | 18950 | 0.3047 |
+| 0.1207 | 3.4130 | 19000 | 0.3047 |
+| 0.1155 | 3.4220 | 19050 | 0.3054 |
+| 0.1198 | 3.4309 | 19100 | 0.3051 |
+| 0.119 | 3.4399 | 19150 | 0.3041 |
+| 0.1304 | 3.4489 | 19200 | 0.3040 |
+| 0.1275 | 3.4579 | 19250 | 0.3033 |
+| 0.1226 | 3.4669 | 19300 | 0.3031 |
+| 0.1361 | 3.4758 | 19350 | 0.3048 |
+| 0.119 | 3.4848 | 19400 | 0.3067 |
+| 0.1207 | 3.4938 | 19450 | 0.3042 |
+| 0.1251 | 3.5028 | 19500 | 0.3029 |
+| 0.126 | 3.5118 | 19550 | 0.3052 |
+| 0.1291 | 3.5207 | 19600 | 0.3030 |
+| 0.1152 | 3.5297 | 19650 | 0.3041 |
+| 0.1229 | 3.5387 | 19700 | 0.3018 |
+| 0.1253 | 3.5477 | 19750 | 0.3034 |
+| 0.1378 | 3.5567 | 19800 | 0.3043 |
+| 0.1514 | 3.5657 | 19850 | 0.3014 |
+| 0.1271 | 3.5746 | 19900 | 0.3028 |
+| 0.1306 | 3.5836 | 19950 | 0.3016 |
+| 0.12 | 3.5926 | 20000 | 0.3019 |
+| 0.1247 | 3.6016 | 20050 | 0.3012 |
+| 0.1206 | 3.6106 | 20100 | 0.3018 |
+| 0.1321 | 3.6195 | 20150 | 0.3007 |
+| 0.1147 | 3.6285 | 20200 | 0.3007 |
+| 0.1184 | 3.6375 | 20250 | 0.3016 |
+| 0.1275 | 3.6465 | 20300 | 0.3028 |
+| 0.133 | 3.6555 | 20350 | 0.3033 |
+| 0.1316 | 3.6645 | 20400 | 0.3033 |
+| 0.1304 | 3.6734 | 20450 | 0.3041 |
+| 0.1407 | 3.6824 | 20500 | 0.3024 |
+| 0.1166 | 3.6914 | 20550 | 0.3023 |
+| 0.1228 | 3.7004 | 20600 | 0.3027 |
+| 0.1251 | 3.7094 | 20650 | 0.3012 |
+| 0.1218 | 3.7183 | 20700 | 0.3022 |
+| 0.1158 | 3.7273 | 20750 | 0.3032 |
+| 0.1287 | 3.7363 | 20800 | 0.3029 |
+| 0.1103 | 3.7453 | 20850 | 0.3032 |
+| 0.1172 | 3.7543 | 20900 | 0.3030 |
+| 0.1251 | 3.7632 | 20950 | 0.3035 |
+| 0.1132 | 3.7722 | 21000 | 0.3025 |
+| 0.1301 | 3.7812 | 21050 | 0.3015 |
+| 0.1262 | 3.7902 | 21100 | 0.3011 |
+| 0.1287 | 3.7992 | 21150 | 0.3014 |
+| 0.1283 | 3.8082 | 21200 | 0.3017 |
+| 0.1296 | 3.8171 | 21250 | 0.3021 |
+| 0.1137 | 3.8261 | 21300 | 0.3025 |
+| 0.1279 | 3.8351 | 21350 | 0.3029 |
+| 0.114 | 3.8441 | 21400 | 0.3023 |
+| 0.1213 | 3.8531 | 21450 | 0.3019 |
+| 0.1174 | 3.8620 | 21500 | 0.3016 |
+| 0.1156 | 3.8710 | 21550 | 0.3019 |
+| 0.1194 | 3.8800 | 21600 | 0.3017 |
+| 0.1136 | 3.8890 | 21650 | 0.3018 |
+| 0.124 | 3.8980 | 21700 | 0.3012 |
+| 0.1204 | 3.9070 | 21750 | 0.3013 |
+| 0.1348 | 3.9159 | 21800 | 0.3015 |
+| 0.1237 | 3.9249 | 21850 | 0.3019 |
+| 0.1213 | 3.9339 | 21900 | 0.3020 |
+| 0.114 | 3.9429 | 21950 | 0.3020 |
+| 0.1136 | 3.9519 | 22000 | 0.3019 |
+| 0.1195 | 3.9608 | 22050 | 0.3017 |
+| 0.1307 | 3.9698 | 22100 | 0.3017 |
+| 0.1355 | 3.9788 | 22150 | 0.3015 |
+| 0.1165 | 3.9878 | 22200 | 0.3016 |
+| 0.1296 | 3.9968 | 22250 | 0.3016 |
 
 
 ### Framework versions
 
 - PEFT 0.11.1
-- Transformers 4.
+- Transformers 4.42.2
 - Pytorch 2.3.0+cu121
 - Datasets 2.20.0
 - Tokenizers 0.19.1
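Since the card above documents a PEFT LoRA adapter rather than a standalone model, a minimal loading sketch may be useful. It assumes the adapter is published under the hypothetical repo id `<user>/Codellama-7b-lora-rps-adapter` and that `torch`, `transformers`, and `peft` are installed:

```python
# Minimal sketch: load the base model, then attach this LoRA adapter.
# "<user>/Codellama-7b-lora-rps-adapter" is a placeholder repo id.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "codellama/CodeLlama-7b-Instruct-hf",
    torch_dtype=torch.float16,  # assumption: fp16 fits the available hardware
    device_map="auto",
)
model = PeftModel.from_pretrained(base, "<user>/Codellama-7b-lora-rps-adapter")
tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-Instruct-hf")

prompt = "Write a function that returns the nth Fibonacci number."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```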
adapter_config.json CHANGED

@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
+    "up_proj",
     "o_proj",
     "k_proj",
-    "up_proj",
-    "gate_proj",
+    "down_proj",
     "v_proj",
-    "q_proj"
+    "q_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
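The `target_modules` list above applies LoRA to every attention projection (`q_proj`, `k_proj`, `v_proj`, `o_proj`) and every MLP projection (`gate_proj`, `up_proj`, `down_proj`) of the Llama architecture; only the serialization order changed in this commit. A sketch of the matching PEFT config follows; `r`, `lora_alpha`, and `lora_dropout` are illustrative assumptions, since their actual values are not shown in this hunk:

```python
from peft import LoraConfig

# Sketch of a LoraConfig reproducing the updated target_modules list.
# r / lora_alpha / lora_dropout are assumed values, not taken from the diff.
lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    target_modules=[
        "up_proj", "o_proj", "k_proj", "down_proj",
        "v_proj", "q_proj", "gate_proj",
    ],
    r=16,               # assumed
    lora_alpha=32,      # assumed
    lora_dropout=0.05,  # assumed
    use_dora=False,
)
```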
adapter_model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:126768d09aa2f429bf1bd69469a7235ac0710c3e64eb5dd77f4a14c7f52e8faa
 size 2332095256
runs/Jun28_12-10-00_d5e66cb6c961/events.out.tfevents.1719576611.d5e66cb6c961.2894.0 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd5a6e6c1c5e0ac6c83adf5ad782d1d66106f331016962bac25b2b4389390897
+size 156884
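The added file is a TensorBoard event log tracked via Git LFS. One way to inspect it locally is sketched below, assuming the scalar tags follow the usual `transformers` Trainer convention (e.g. `train/loss`, `eval/loss`); adjust the tag name to whatever `Tags()` actually reports:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the directory containing the event file.
ea = EventAccumulator("runs/Jun28_12-10-00_d5e66cb6c961")
ea.Reload()

print(ea.Tags()["scalars"])  # list the scalar tags actually present

# "eval/loss" is an assumed tag name, not confirmed by this commit.
for event in ea.Scalars("eval/loss"):
    print(event.step, event.value)
```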
training_args.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:df7eae5fca405b662f8cf6e5eaef54f860208ce95ed2dfb952eac4235d24c041
 size 5432
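`training_args.bin` is a pickled `TrainingArguments` object saved by the Trainer. A sketch for inspecting it locally; `weights_only=False` is required on recent PyTorch versions, and unpickling implies trusting the file:

```python
import torch
from transformers import TrainingArguments  # must be importable for unpickling

# Load the pickled TrainingArguments (trusted file assumed).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```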