minor ui changes (#4)
- minor ui changes (dc843c4a87d1f22ae05772f0982743d3b8181797)
- fix first token (c0a0d79638236daaede01e3b5bc9d65d9c87ac16)
Co-authored-by: Radamés Ajna <radames@users.noreply.huggingface.co>
- index.html +103 -85
- llama2cWorker.js +9 -2
index.html
CHANGED
@@ -6,7 +6,7 @@
  <body></body>
</html>

-<!
+<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8" />
@@ -56,6 +56,7 @@
      const weightsURL = `${MODELS_BASE_URL}/${model.url}`;
      const prompt = getValue("prompt");
      const temperature = getValue("temperature");
+     const topP = getValue("top-p");
      const repeatPenalty = getValue("repeat_penalty");
      const seed = getValue("seed");
      const maxSeqLen = getValue("max-seq");
@@ -99,6 +100,7 @@
        tokenizerURL: "tokenizer.json",
        prompt,
        temp: temperature,
+       top_p: topP,
        repeatPenalty,
        seed: BigInt(seed),
        maxSeqLen,
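
The two hunks above read a Top-p value from the page and include it in the options object that index.html sends to the inference worker. A minimal sketch of that hand-off, assuming the page posts the object to the `llamaWorker` Web Worker with `postMessage`; the `getValue` body, the Worker construction, and the `weightsURL`/`modelID` placeholders are illustrative assumptions, only the field names come from the diff:

  // Sketch only: getValue(), the Worker construction and the placeholder
  // model values are assumptions; the field names match the diff above.
  const getValue = (id) => document.querySelector(`#${id}`).value;
  const llamaWorker = new Worker("./llama2cWorker.js", { type: "module" });

  llamaWorker.postMessage({
    weightsURL: "stories15M.bin",      // placeholder; the page builds this from MODELS_BASE_URL
    modelID: "stories15M",             // placeholder
    tokenizerURL: "tokenizer.json",
    prompt: getValue("prompt"),
    temp: getValue("temperature"),
    top_p: getValue("top-p"),          // new in this commit
    repeatPenalty: getValue("repeat_penalty"),
    seed: BigInt(getValue("seed")),
    maxSeqLen: getValue("max-seq"),
  });
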
@@ -111,8 +113,18 @@
      const handleMessage = (event) => {
        const { status, error, message, prompt, sentence } = event.data;
        if (status) updateStatus(event.data);
-       if (error)
-
+       if (error) {
+         llamaWorker.removeEventListener("message", handleMessage);
+         reject(new Error(error));
+       }
+       if (status === "aborted") {
+         llamaWorker.removeEventListener("message", handleMessage);
+         resolve(event.data);
+       }
+       if (status === "complete") {
+         llamaWorker.removeEventListener("message", handleMessage);
+         resolve(event.data);
+       }
      };

      controller.signal.addEventListener("abort", handleAbort);
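
The `resolve` and `reject` calls added above only make sense inside a Promise executor, which lies outside this hunk. A rough sketch of that surrounding wrapper, assuming an AbortController-driven cancel path; everything not named in the diff (the function name, the "abort" command message) is a guess:

  // Sketch of the assumed wrapper: one generation request as a Promise,
  // resolved or rejected by messages coming back from the worker.
  function generateSequence(controller) {
    return new Promise((resolve, reject) => {
      const handleAbort = () => {
        // Assumed: ask the worker to stop; it replies with status "aborted".
        llamaWorker.postMessage({ command: "abort" });
      };
      const handleMessage = (event) => {
        const { status, error } = event.data;
        if (status) updateStatus(event.data);
        if (error) {
          llamaWorker.removeEventListener("message", handleMessage);
          reject(new Error(error));
        }
        if (status === "aborted" || status === "complete") {
          llamaWorker.removeEventListener("message", handleMessage);
          resolve(event.data);
        }
      };
      controller.signal.addEventListener("abort", handleAbort);
      llamaWorker.addEventListener("message", handleMessage);
    });
  }
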
@@ -204,8 +216,7 @@
        <label for="model" class="font-medium">Models Options: </label>
        <select
          id="model"
-         class="border-2 border-gray-500 rounded-md font-light"
-       >
+         class="border-2 border-gray-500 rounded-md font-light">
          <option value="stories15M" selected>stories 15M (60.8 MB)</option>
          <option value="stories42M">stories 42M (167 MB)</option>
          <option value="stories110M">stories 110M (438 MB)</option>
@@ -213,117 +224,124 @@
      </div>
      <form
        id="form"
-       class="flex text-normal px-1 py-1 border border-gray-700 rounded-md items-center"
-     >
+       class="flex text-normal px-1 py-1 border border-gray-700 rounded-md items-center">
        <input type="submit" hidden />
        <input
          type="text"
          id="prompt"
          class="font-light w-full px-3 py-2 mx-1 resize-none outline-none"
          placeholder="Add your prompt here..."
-         value="Once upon a time"
-       />
+         value="Once upon a time" />
        <button id="clear-btn">
          <svg
            fill="none"
            xmlns="http://www.w3.org/2000/svg"
            width="40"
-           viewBox="0 0 70 40"
-         >
+           viewBox="0 0 70 40">
            <path opacity=".5" d="M39 .2v40.2" stroke="#1F2937" />
            <path
              d="M1.5 11.5 19 29.1m0-17.6L1.5 29.1"
              opacity=".5"
              stroke="#1F2937"
-             stroke-width="2"
-           />
+             stroke-width="2" />
          </svg>
        </button>
        <button
          id="run"
-         class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-2 w-16 rounded disabled:bg-gray-300 disabled:cursor-not-allowed"
-       >
+         class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-2 w-16 rounded disabled:bg-gray-300 disabled:cursor-not-allowed">
          Run
        </button>
      </form>
-     [old lines 249-316 were also deleted; the diff view truncated them, leaving only a closing </button> and </div> visible]
+     <details>
+       <summary class="font-medium cursor-pointer">Advanced Options</summary>
+       <div class="grid grid-cols-3 max-w-md items-center gap-3 py-3">
+         <label class="text-sm font-medium" for="max-seq"
+           >Maximum length
+         </label>
+         <input
+           type="range"
+           id="max-seq"
+           name="max-seq"
+           min="1"
+           max="256"
+           step="1"
+           value="200"
+           oninput="this.nextElementSibling.value = Number(this.value)" />
+         <output
+           class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+           200</output
+         >
+         <label class="text-sm font-medium" for="temperature"
+           >Temperature</label
+         >
+         <input
+           type="range"
+           id="temperature"
+           name="temperature"
+           min="0"
+           max="2"
+           step="0.01"
+           value="0.40"
+           oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+         <output
+           class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+           0.40</output
+         >
+         <label class="text-sm font-medium" for="top-p">Top-p</label>
+         <input
+           type="range"
+           id="top-p"
+           name="top-p"
+           min="0"
+           max="1"
+           step="0.01"
+           value="1.00"
+           oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+         <output
+           class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+           1.00</output
+         >

+         <label class="text-sm font-medium" for="repeat_penalty"
+           >Repeat Penalty</label
+         >

+         <input
+           type="range"
+           id="repeat_penalty"
+           name="repeat_penalty"
+           min="1"
+           max="2"
+           step="0.01"
+           value="1.10"
+           oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+         <output
+           class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md"
+           >1.10</output
+         >
+         <label class="text-sm font-medium" for="seed">Seed</label>
+         <input
+           type="number"
+           id="seed"
+           name="seed"
+           value="299792458"
+           class="font-light border border-gray-700 text-right rounded-md p-2" />
+         <button
+           id="run"
+           onclick="document.querySelector('#seed').value = BigInt(Math.floor(Math.random() * 2**64-1))"
+           class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-1 w-[50px] rounded disabled:bg-gray-300 disabled:cursor-not-allowed text-sm">
+           Rand
+         </button>
+       </div>
+     </details>
      <div>
        <h3 class="font-medium">Generation:</h3>
        <div
-         class="min-h-[250px] bg-slate-100 text-gray-500 p-4 rounded-md flex flex-col gap-2"
-       >
+         class="min-h-[250px] bg-slate-100 text-gray-500 p-4 rounded-md flex flex-col gap-2">
          <div
            id="output-counter"
            hidden
-           class="ml-auto font-semibold grid-rows-1 text-sm"
-         ></div>
+           class="ml-auto font-semibold grid-rows-1 text-sm"></div>
          <p hidden id="output-generation" class="grid-rows-2"></p>
          <span id="output-status" class="m-auto font-light"
            >No output yet</span
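
The new Advanced Options block relies on each range input being immediately followed by its `<output>` element: the inline `oninput` handlers copy the slider value into `this.nextElementSibling`, and the Rand button fills the seed field with a random BigInt. For illustration only, here is the same slider-to-output mirroring wired up from a script instead of inline attributes (the page itself keeps the inline handlers):

  // Illustration: equivalent wiring without inline oninput attributes.
  for (const slider of document.querySelectorAll('input[type="range"]')) {
    slider.addEventListener("input", () => {
      // The <output> right after each slider displays its current value,
      // which is why the markup keeps the two elements adjacent.
      const digits = Number(slider.step) < 1 ? 2 : 0;
      slider.nextElementSibling.value = Number(slider.value).toFixed(digits);
    });
  }
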
llama2cWorker.js
CHANGED
@@ -50,6 +50,7 @@ async function generate(data) {
    tokenizerURL,
    prompt,
    temp,
+   top_p,
    repeatPenalty,
    seed,
    maxSeqLen,
@@ -59,11 +60,17 @@ async function generate(data) {
  const model = await Llama2C.getInstance(weightsURL, modelID, tokenizerURL);

  self.postMessage({ status: "loading", message: "Initializing model" });
- model.init_with_prompt(
+ const firstToken = model.init_with_prompt(
+   prompt,
+   temp,
+   top_p,
+   repeatPenalty,
+   seed
+ );

  const seq_len = model.get_seq_len();

- let sentence =
+ let sentence = firstToken;
  let maxTokens = maxSeqLen ? maxSeqLen : seq_len - prompt.length - 1;
  let startTime = performance.now();
  let tokensCount = 0;
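
The "fix first token" commit makes `init_with_prompt` return the first sampled token and seeds `sentence` with it, so that token is no longer dropped from the streamed output. A rough sketch of how the loop that follows presumably uses it; `next_token()` and the exact fields posted back are assumptions about the surrounding worker code, only `sentence`, `prompt`, and the "complete" status appear in the diffs above:

  // Sketch only: plausible continuation of the hunk above.
  let sentence = firstToken;                 // the first sampled token is now kept
  while (tokensCount < maxTokens) {
    const token = model.next_token();        // assumed API for sampling one more token
    sentence += token;
    tokensCount++;
    // Stream the partial sentence back; index.html's handleMessage shows it
    // and resolves the wrapping Promise when status becomes "complete".
    self.postMessage({ status: "generating", message: "Generating token", sentence, prompt });
  }
  self.postMessage({ status: "complete", message: "complete", sentence, prompt });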