End of training
README.md CHANGED
@@ -1,230 +1,57 @@
 ---
 base_model: codellama/CodeLlama-7b-Instruct-hf
-library_name:
+library_name: transformers
+model_name: Codellama-7b-lora-rps-adapter
 tags:
+- generated_from_trainer
 - trl
 - sft
-model-index:
-- name: Codellama-7b-lora-rps-adapter
-  results: []
+licence: license
 ---
 
-<!-- This model card has been generated automatically according to the information the Trainer had access to. You
-should probably proofread and complete it, then remove this comment. -->
-
-# Codellama-7b-lora-rps-adapter
-
-This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) on the None dataset.
-It achieves the following results on the evaluation set:
-- Loss: 0.3066
-
-## Model description
-
-More information needed
-
-## Intended uses & limitations
-
-More information needed
-
-## Training and evaluation data
-
-More information needed
-
-## Training procedure
-
-### Training hyperparameters
-
-The following hyperparameters were used during training:
-- learning_rate: 0.0002
-- train_batch_size: 2
-- eval_batch_size: 2
-- seed: 42
-- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
-- lr_scheduler_type: linear
-- lr_scheduler_warmup_ratio: 0.03
-- num_epochs: 4
-
-### Training results
-
-| Training Loss | Epoch | Step | Validation Loss |
-|:-------------:|:------:|:-----:|:---------------:|
-| 0.197 | 2.6742 | 17000 | 0.2906 |
-| 0.1777 | 2.6821 | 17050 | 0.2934 |
-| 0.1949 | 2.6899 | 17100 | 0.2911 |
-| 0.2131 | 2.6978 | 17150 | 0.2928 |
-| 0.1839 | 2.7057 | 17200 | 0.2921 |
-| 0.2039 | 2.7135 | 17250 | 0.2896 |
-| 0.2187 | 2.7214 | 17300 | 0.2906 |
-| 0.185 | 2.7293 | 17350 | 0.2906 |
-| 0.1837 | 2.7371 | 17400 | 0.2933 |
-| 0.2117 | 2.7450 | 17450 | 0.2889 |
-| 0.2143 | 2.7529 | 17500 | 0.2904 |
-| 0.1814 | 2.7607 | 17550 | 0.2897 |
-| 0.1982 | 2.7686 | 17600 | 0.2898 |
-| 0.2243 | 2.7765 | 17650 | 0.2903 |
-| 0.1817 | 2.7843 | 17700 | 0.2895 |
-| 0.1921 | 2.7922 | 17750 | 0.2919 |
-| 0.2097 | 2.8001 | 17800 | 0.2913 |
-| 0.1883 | 2.8079 | 17850 | 0.2903 |
-| 0.1905 | 2.8158 | 17900 | 0.2882 |
-| 0.2034 | 2.8237 | 17950 | 0.2884 |
-| 0.2008 | 2.8315 | 18000 | 0.2891 |
-| 0.184 | 2.8394 | 18050 | 0.2883 |
-| 0.1732 | 2.8473 | 18100 | 0.2896 |
-| 0.1905 | 2.8551 | 18150 | 0.2895 |
-| 0.1812 | 2.8630 | 18200 | 0.2895 |
-| 0.1941 | 2.8709 | 18250 | 0.2899 |
-| 0.2063 | 2.8787 | 18300 | 0.2879 |
-| 0.1982 | 2.8866 | 18350 | 0.2868 |
-| 0.1946 | 2.8944 | 18400 | 0.2895 |
-| 0.2104 | 2.9023 | 18450 | 0.2874 |
-| 0.1851 | 2.9102 | 18500 | 0.2878 |
-| 0.1968 | 2.9180 | 18550 | 0.2868 |
-| 0.1964 | 2.9259 | 18600 | 0.2880 |
-| 0.1863 | 2.9338 | 18650 | 0.2880 |
-| 0.1875 | 2.9416 | 18700 | 0.2876 |
-| 0.1698 | 2.9495 | 18750 | 0.2863 |
-| 0.2082 | 2.9574 | 18800 | 0.2881 |
-| 0.1962 | 2.9652 | 18850 | 0.2869 |
-| 0.2061 | 2.9731 | 18900 | 0.2860 |
-| 0.2132 | 2.9810 | 18950 | 0.2869 |
-| 0.1854 | 2.9888 | 19000 | 0.2875 |
-| 0.1906 | 2.9967 | 19050 | 0.2879 |
-| 0.144 | 3.0046 | 19100 | 0.3005 |
-| 0.1302 | 3.0124 | 19150 | 0.3097 |
-| 0.1324 | 3.0203 | 19200 | 0.3090 |
-| 0.1344 | 3.0282 | 19250 | 0.3094 |
-| 0.1392 | 3.0360 | 19300 | 0.3064 |
-| 0.1464 | 3.0439 | 19350 | 0.3066 |
-| 0.141 | 3.0518 | 19400 | 0.3070 |
-| 0.1275 | 3.0596 | 19450 | 0.3103 |
-| 0.1284 | 3.0675 | 19500 | 0.3074 |
-| 0.1397 | 3.0754 | 19550 | 0.3111 |
-| 0.1335 | 3.0832 | 19600 | 0.3105 |
-| 0.1302 | 3.0911 | 19650 | 0.3082 |
-| 0.1315 | 3.0989 | 19700 | 0.3094 |
-| 0.128 | 3.1068 | 19750 | 0.3110 |
-| 0.1272 | 3.1147 | 19800 | 0.3094 |
-| 0.1227 | 3.1225 | 19850 | 0.3074 |
-| 0.1375 | 3.1304 | 19900 | 0.3093 |
-| 0.1344 | 3.1383 | 19950 | 0.3092 |
-| 0.1301 | 3.1461 | 20000 | 0.3098 |
-| 0.1339 | 3.1540 | 20050 | 0.3083 |
-| 0.1398 | 3.1619 | 20100 | 0.3100 |
-| 0.132 | 3.1697 | 20150 | 0.3109 |
-| 0.1499 | 3.1776 | 20200 | 0.3070 |
-| 0.1438 | 3.1855 | 20250 | 0.3075 |
-| 0.1267 | 3.1933 | 20300 | 0.3106 |
-| 0.1282 | 3.2012 | 20350 | 0.3082 |
-| 0.1365 | 3.2091 | 20400 | 0.3075 |
-| 0.1239 | 3.2169 | 20450 | 0.3110 |
-| 0.1507 | 3.2248 | 20500 | 0.3087 |
-| 0.1364 | 3.2327 | 20550 | 0.3112 |
-| 0.1281 | 3.2405 | 20600 | 0.3092 |
-| 0.1271 | 3.2484 | 20650 | 0.3104 |
-| 0.1124 | 3.2563 | 20700 | 0.3097 |
-| 0.1382 | 3.2641 | 20750 | 0.3111 |
-| 0.1415 | 3.2720 | 20800 | 0.3101 |
-| 0.1246 | 3.2798 | 20850 | 0.3115 |
-| 0.1337 | 3.2877 | 20900 | 0.3095 |
-| 0.1378 | 3.2956 | 20950 | 0.3069 |
-| 0.1219 | 3.3034 | 21000 | 0.3081 |
-| 0.1303 | 3.3113 | 21050 | 0.3098 |
-| 0.1445 | 3.3192 | 21100 | 0.3081 |
-| 0.134 | 3.3270 | 21150 | 0.3090 |
-| 0.1389 | 3.3349 | 21200 | 0.3098 |
-| 0.1388 | 3.3428 | 21250 | 0.3087 |
-| 0.1317 | 3.3506 | 21300 | 0.3094 |
-| 0.1367 | 3.3585 | 21350 | 0.3080 |
-| 0.1267 | 3.3664 | 21400 | 0.3092 |
-| 0.1333 | 3.3742 | 21450 | 0.3102 |
-| 0.1266 | 3.3821 | 21500 | 0.3102 |
-| 0.1345 | 3.3900 | 21550 | 0.3075 |
-| 0.1279 | 3.3978 | 21600 | 0.3083 |
-| 0.1342 | 3.4057 | 21650 | 0.3078 |
-| 0.141 | 3.4136 | 21700 | 0.3102 |
-| 0.1241 | 3.4214 | 21750 | 0.3066 |
-| 0.14 | 3.4293 | 21800 | 0.3083 |
-| 0.1232 | 3.4372 | 21850 | 0.3070 |
-| 0.1296 | 3.4450 | 21900 | 0.3081 |
-| 0.1286 | 3.4529 | 21950 | 0.3065 |
-| 0.1313 | 3.4608 | 22000 | 0.3071 |
-| 0.1484 | 3.4686 | 22050 | 0.3058 |
-| 0.1395 | 3.4765 | 22100 | 0.3074 |
-| 0.1311 | 3.4843 | 22150 | 0.3064 |
-| 0.1116 | 3.4922 | 22200 | 0.3095 |
-| 0.1269 | 3.5001 | 22250 | 0.3102 |
-| 0.1308 | 3.5079 | 22300 | 0.3067 |
-| 0.127 | 3.5158 | 22350 | 0.3077 |
-| 0.1176 | 3.5237 | 22400 | 0.3086 |
-| 0.1234 | 3.5315 | 22450 | 0.3095 |
-| 0.1359 | 3.5394 | 22500 | 0.3075 |
-| 0.1337 | 3.5473 | 22550 | 0.3083 |
-| 0.1224 | 3.5551 | 22600 | 0.3088 |
-| 0.1286 | 3.5630 | 22650 | 0.3090 |
-| 0.1341 | 3.5709 | 22700 | 0.3076 |
-| 0.1419 | 3.5787 | 22750 | 0.3099 |
-| 0.1478 | 3.5866 | 22800 | 0.3072 |
-| 0.1215 | 3.5945 | 22850 | 0.3080 |
-| 0.1298 | 3.6023 | 22900 | 0.3073 |
-| 0.1368 | 3.6102 | 22950 | 0.3071 |
-| 0.1388 | 3.6181 | 23000 | 0.3070 |
-| 0.1239 | 3.6259 | 23050 | 0.3069 |
-| 0.1202 | 3.6338 | 23100 | 0.3066 |
-| 0.1329 | 3.6417 | 23150 | 0.3060 |
-| 0.1262 | 3.6495 | 23200 | 0.3070 |
-| 0.1221 | 3.6574 | 23250 | 0.3084 |
-| 0.1233 | 3.6653 | 23300 | 0.3068 |
-| 0.1222 | 3.6731 | 23350 | 0.3063 |
-| 0.133 | 3.6810 | 23400 | 0.3067 |
-| 0.1276 | 3.6888 | 23450 | 0.3054 |
-| 0.1214 | 3.6967 | 23500 | 0.3065 |
-| 0.1308 | 3.7046 | 23550 | 0.3072 |
-| 0.1278 | 3.7124 | 23600 | 0.3074 |
-| 0.1177 | 3.7203 | 23650 | 0.3070 |
-| 0.1302 | 3.7282 | 23700 | 0.3067 |
-| 0.1279 | 3.7360 | 23750 | 0.3068 |
-| 0.132 | 3.7439 | 23800 | 0.3078 |
-| 0.143 | 3.7518 | 23850 | 0.3070 |
-| 0.1365 | 3.7596 | 23900 | 0.3068 |
-| 0.1456 | 3.7675 | 23950 | 0.3073 |
-| 0.1312 | 3.7754 | 24000 | 0.3069 |
-| 0.1304 | 3.7832 | 24050 | 0.3073 |
-| 0.1409 | 3.7911 | 24100 | 0.3069 |
-| 0.1369 | 3.7990 | 24150 | 0.3067 |
-| 0.1291 | 3.8068 | 24200 | 0.3065 |
-| 0.1114 | 3.8147 | 24250 | 0.3073 |
-| 0.1294 | 3.8226 | 24300 | 0.3067 |
-| 0.1223 | 3.8304 | 24350 | 0.3071 |
-| 0.1166 | 3.8383 | 24400 | 0.3074 |
-| 0.1233 | 3.8462 | 24450 | 0.3076 |
-| 0.1348 | 3.8540 | 24500 | 0.3073 |
-| 0.1203 | 3.8619 | 24550 | 0.3069 |
-| 0.1326 | 3.8697 | 24600 | 0.3072 |
-| 0.123 | 3.8776 | 24650 | 0.3070 |
-| 0.13 | 3.8855 | 24700 | 0.3069 |
-| 0.1305 | 3.8933 | 24750 | 0.3073 |
-| 0.1366 | 3.9012 | 24800 | 0.3075 |
-| 0.1428 | 3.9091 | 24850 | 0.3078 |
-| 0.1258 | 3.9169 | 24900 | 0.3075 |
-| 0.135 | 3.9248 | 24950 | 0.3073 |
-| 0.1282 | 3.9327 | 25000 | 0.3071 |
-| 0.1323 | 3.9405 | 25050 | 0.3073 |
-| 0.1242 | 3.9484 | 25100 | 0.3072 |
-| 0.1453 | 3.9563 | 25150 | 0.3071 |
-| 0.1441 | 3.9641 | 25200 | 0.3067 |
-| 0.1273 | 3.9720 | 25250 | 0.3066 |
-| 0.115 | 3.9799 | 25300 | 0.3067 |
-| 0.1337 | 3.9877 | 25350 | 0.3067 |
-| 0.1259 | 3.9956 | 25400 | 0.3066 |
-
-### Framework versions
+# Model Card for Codellama-7b-lora-rps-adapter
+
+This model is a fine-tuned version of [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf).
+It has been trained using [TRL](https://github.com/huggingface/trl).
+
+## Quick start
+
+```python
+from transformers import pipeline
+
+question = "If you had a time machine, but could only go to the past or the future once and never return, which would you choose and why?"
+generator = pipeline("text-generation", model="SimonMA/Codellama-7b-lora-rps-adapter", device="cuda")
+output = generator([{"role": "user", "content": question}], max_new_tokens=128, return_full_text=False)[0]
+print(output["generated_text"])
+```
+
+## Training procedure
+
+This model was trained with SFT.
+
+### Framework versions
+
+- TRL: 0.12.0
+- Transformers: 4.46.2
+- Pytorch: 2.4.1+cu121
+- Datasets: 3.1.0
+- Tokenizers: 0.20.3
+
+## Citations
+
+Cite TRL as:
+
+```bibtex
+@misc{vonwerra2022trl,
+    title        = {{TRL: Transformer Reinforcement Learning}},
+    author       = {Leandro von Werra and Younes Belkada and Lewis Tunstall and Edward Beeching and Tristan Thrush and Nathan Lambert and Shengyi Huang and Kashif Rasul and Quentin Gallouédec},
+    year         = 2020,
+    journal      = {GitHub repository},
+    publisher    = {GitHub},
+    howpublished = {\url{https://github.com/huggingface/trl}}
+}
+```
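The updated card records only that the model was trained with SFT, while the removed card preserved the run's hyperparameters. Below is a minimal sketch of how such a run could look with TRL's `SFTTrainer`; the dataset file is a hypothetical placeholder, only the hyperparameters are taken from the removed card, and the LoRA target modules are recorded in `adapter_config.json` further down.

```python
# A sketch, not the author's actual training script: the dataset path is a
# hypothetical placeholder; the hyperparameters mirror the removed card.
from datasets import load_dataset
from peft import LoraConfig
from trl import SFTConfig, SFTTrainer

train_dataset = load_dataset("json", data_files="rps_train.jsonl", split="train")  # placeholder data

args = SFTConfig(
    output_dir="Codellama-7b-lora-rps-adapter",
    learning_rate=2e-4,             # learning_rate: 0.0002
    per_device_train_batch_size=2,  # train_batch_size: 2
    per_device_eval_batch_size=2,   # eval_batch_size: 2
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.03,              # lr_scheduler_warmup_ratio: 0.03
    num_train_epochs=4,
)

trainer = SFTTrainer(
    model="codellama/CodeLlama-7b-Instruct-hf",
    args=args,
    train_dataset=train_dataset,
    peft_config=LoraConfig(task_type="CAUSAL_LM"),  # module list as in adapter_config.json below
)
trainer.train()
```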
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "
+    "v_proj",
     "o_proj",
-    "k_proj",
-    "q_proj",
     "gate_proj",
-    "
-    "
+    "up_proj",
+    "down_proj",
+    "k_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
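The new `target_modules` list covers every attention and MLP projection of the Llama block. A sketch of a peft config that would produce such a file; `r`, `lora_alpha`, and `lora_dropout` are placeholders, since the diff does not show those fields:

```python
# A sketch of producing this adapter_config.json with peft. The r,
# lora_alpha, and lora_dropout values are assumptions, not from the diff.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,             # assumed; not visible in the diff
    lora_alpha=32,    # assumed; not visible in the diff
    lora_dropout=0.05,  # assumed; not visible in the diff
    target_modules=["v_proj", "o_proj", "gate_proj", "up_proj", "down_proj", "k_proj", "q_proj"],
    task_type="CAUSAL_LM",
    use_dora=False,
)
lora_config.save_pretrained("Codellama-7b-lora-rps-adapter")  # writes adapter_config.json
```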
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e300e9ce578ee5c5d293eb7a2081aba28c6d1693030e10d5649cc892d2c0c288
 size 2332095256
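This LFS pointer tracks the adapter weights themselves (about 2.3 GB). A minimal loading sketch with peft, assuming the repo is a standard PEFT adapter for the CodeLlama base model:

```python
# Minimal sketch: attach the LoRA adapter to the base model with peft.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base = AutoModelForCausalLM.from_pretrained(
    "codellama/CodeLlama-7b-Instruct-hf", torch_dtype=torch.float16, device_map="auto"
)
model = PeftModel.from_pretrained(base, "SimonMA/Codellama-7b-lora-rps-adapter")
tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-Instruct-hf")

# CodeLlama-Instruct prompt format
prompt = "[INST] Write a Python function that checks whether a string is a palindrome. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=128)[0], skip_special_tokens=True))
```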
runs/Nov08_11-01-31_f9264325da9e/events.out.tfevents.1731063704.f9264325da9e.13873.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f2d9b9212894bb1c1763ef0255e4e0deac34013a2d1d9b563d8c102349c599b3
+size 170730
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:32ae18e8fac07e67fc5ea615a995e7dd1f92b8d9f9bb32e00065f4d5db168749
 size 5560
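`training_args.bin` is a pickled `TrainingArguments` object rather than model weights, so a downloaded copy can be inspected directly; a small sketch:

```python
# Inspect the exact run configuration stored in training_args.bin.
import torch

args = torch.load("training_args.bin", weights_only=False)  # pickled object, not tensors
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type)
```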