<script lang="ts">
	import type { Conversation, ModelEntryWithTokenizer } from "./types";
	import type { ChatCompletionInputMessage } from "@huggingface/tasks";

	import { page } from "$app/stores";
	import { defaultGenerationConfig } from "./generationConfigSettings";
	import {
		createHfInference,
		handleStreamingResponse,
		handleNonStreamingResponse,
		isSystemPromptSupported,
		FEATURED_MODELS_IDS,
	} from "./inferencePlaygroundUtils";

	import { onDestroy, onMount } from "svelte";
	import GenerationConfig from "./InferencePlaygroundGenerationConfig.svelte";
	import HFTokenModal from "./InferencePlaygroundHFTokenModal.svelte";
	import ModelSelector from "./InferencePlaygroundModelSelector.svelte";
	import PlaygroundConversation from "./InferencePlaygroundConversation.svelte";
	import IconDelete from "../Icons/IconDelete.svelte";
	import IconCode from "../Icons/IconCode.svelte";
	import IconInfo from "../Icons/IconInfo.svelte";

	export let models: ModelEntryWithTokenizer[];

	const startMessageUser: ChatCompletionInputMessage = { role: "user", content: "" };
	const startMessageSystem: ChatCompletionInputMessage = { role: "system", content: "" };

	const modelIdFromQueryParam = $page.url.searchParams.get("modelId");
	const modelFromQueryParam = models.find(model => model.id === modelIdFromQueryParam);

	let conversation: Conversation = {
		model: modelFromQueryParam ?? models.find(m => FEATURED_MODELS_IDS.includes(m.id)) ?? models[0],
		config: defaultGenerationConfig,
		messages: [{ ...startMessageUser }],
		systemMessage: startMessageSystem,
		streaming: true,
	};

	let hfToken = "";
	let viewCode = false;
	let viewSettings = false;
	let showTokenModal = false;
	let loading = false;
	let latency = 0; // total generation time in ms
	let firstTokenLatency = 0; // time to first generated token in ms
	let generatedTokensCount = 0;
	let abortController: AbortController | undefined = undefined;
	let waitForNonStreaming = true;
	let storeLocallyHfToken = true;

	const hfTokenLocalStorageKey = "hf_token";

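	// Recompute whether the selected model supports a system prompt whenever the conversation changes.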
	$: systemPromptSupported = isSystemPromptSupported(conversation.model);

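	// Append an empty message, alternating roles with the last message in the conversation.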
	function addMessage() {
		conversation.messages = [
			...conversation.messages,
			{
				role: conversation.messages.at(-1)?.role === "user" ? "assistant" : "user",
				content: "",
			},
		];
	}

	function deleteMessage(idx: number) {
		conversation.messages.splice(idx, 1);
		conversation = conversation;
	}

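	// Clear the system prompt and restart the conversation with a single empty user message.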
	function reset() {
		conversation.systemMessage.content = "";
		conversation.messages = [{ ...startMessageUser }];
	}

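	// Cancel any in-flight request and ignore a pending non-streaming response.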
	function abort() {
		abortController?.abort();
		abortController = undefined;
		loading = false;
		waitForNonStreaming = false;
	}

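	// Send the conversation to the Inference API: prompt for a token if missing, enforce
	// user/assistant alternation, then run a streaming or non-streaming request while
	// tracking first-token latency, total generation time, and generated token count.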
	async function submit() {
		if (!hfToken) {
			showTokenModal = true;
			return;
		}

		if (conversation.messages.at(-1)?.role === "assistant") {
			return alert("Messages must alternate between user/assistant roles.");
		}

		(document.activeElement as HTMLElement | null)?.blur();
		loading = true;
		firstTokenLatency = 0; // Reset before each submission
		generatedTokensCount = 0; // Reset before each submission

		try {
			const startTime = performance.now();
			const hf = createHfInference(hfToken);

			if (conversation.streaming) {
				let firstTokenReceived = false; // Flag to track first token
				const streamingMessage = { role: "assistant", content: "" };
				conversation.messages = [...conversation.messages, streamingMessage];
				abortController = new AbortController();

				await handleStreamingResponse(
					hf,
					conversation,
					content => {
						if (streamingMessage) {
							streamingMessage.content = content;
							conversation.messages = [...conversation.messages];
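							// count each streamed chunk as one generated token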
							generatedTokensCount += 1;

							if (!firstTokenReceived) { // Check if it's the first token
								firstTokenLatency = Math.round(performance.now() - startTime);
								firstTokenReceived = true;
							}
						}
					},
					abortController
				);
			} else {
				waitForNonStreaming = true;
				const { message: newMessage, completion_tokens: newTokensCount } = await handleNonStreamingResponse(
					hf,
					conversation
				);
				// check if the user did not abort the request
				if (waitForNonStreaming) {
					conversation.messages = [...conversation.messages, newMessage];
					generatedTokensCount += newTokensCount;
					firstTokenLatency = Math.round(performance.now() - startTime); // in non-streaming mode, first-token latency equals total latency
				}
			}

			latency = Math.round(performance.now() - startTime); // Total generation time
		} catch (error) {
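			// drop the empty assistant placeholder if the request failed before producing any content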
			if (conversation.messages.at(-1)?.role === "assistant" && !conversation.messages.at(-1)?.content?.trim()) {
				conversation.messages.pop();
				conversation.messages = [...conversation.messages];
			}
			if (error instanceof Error) {
				if (error.message.includes("token seems invalid")) {
					hfToken = "";
					localStorage.removeItem(hfTokenLocalStorageKey);
					showTokenModal = true;
				}
				if (error.name !== "AbortError") {
					alert("error: " + error.message);
				}
			} else {
				alert("An unknown error occurred");
			}
		} finally {
			loading = false;
			abortController = undefined;
		}
	}

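	// Submit on Ctrl/Cmd + Enter.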
	function onKeydown(event: KeyboardEvent) {
		if ((event.ctrlKey || event.metaKey) && event.key === "Enter") {
			submit();
		}
	}

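	// Validate the submitted token against the expected "hf_..." format, optionally persist it
	// to localStorage, then retry the pending submission.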
	function handleTokenSubmit(e: Event) {
		const form = e.target as HTMLFormElement;
		const formData = new FormData(form);
		const submittedHfToken = ((formData.get("hf-token") as string) ?? "").trim();
		const RE_HF_TOKEN = /\bhf_[a-zA-Z0-9]{34}\b/;
		if (RE_HF_TOKEN.test(submittedHfToken)) {
			hfToken = submittedHfToken;
			if (storeLocallyHfToken) {
				localStorage.setItem(hfTokenLocalStorageKey, JSON.stringify(hfToken));
			}
			submit();
			showTokenModal = false;
		} else {
			alert("Please provide a valid HF token.");
		}
	}

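	// Restore a previously saved token from localStorage, if any.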
	onMount(() => {
		const storedHfToken = localStorage.getItem(hfTokenLocalStorageKey);
		if (storedHfToken !== null) {
			hfToken = JSON.parse(storedHfToken);
		}
	});

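	// Abort any in-flight request when the component is destroyed.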
	onDestroy(() => {
		abortController?.abort();
	});
</script>

{#if showTokenModal}
	<HFTokenModal bind:storeLocallyHfToken on:close={() => (showTokenModal = false)} on:submit={handleTokenSubmit} />
{/if}

<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
	class="w-dvh grid h-dvh divide-gray-200 overflow-hidden bg-gray-100/50 max-md:grid-rows-[120px,1fr] max-md:divide-y md:grid-cols-[clamp(220px,20%,350px),minmax(0,1fr),clamp(270px,25%,300px)] dark:divide-gray-800 dark:bg-gray-900 dark:text-gray-300 dark:[color-scheme:dark]"
>
	<div class="flex flex-col overflow-y-auto py-3 pr-3 max-md:pl-3">
		<div
			class="relative flex flex-1 flex-col gap-6 overflow-y-hidden rounded-r-xl border-x border-y border-gray-200/80 bg-gradient-to-b from-white via-white p-3 shadow-sm max-md:rounded-xl dark:border-white/5 dark:from-gray-800/40 dark:via-gray-800/40"
			class:pointer-events-none={!systemPromptSupported}
			class:opacity-70={!systemPromptSupported}
		>
			<div class="pb-2 text-sm font-semibold uppercase">system</div>
			<textarea
				name=""
				id=""
				placeholder={systemPromptSupported
					? "Enter a custom prompt"
					: "System prompt is not supported with the chosen model."}
				value={systemPromptSupported ? conversation.systemMessage.content : ""}
				on:input={e => (conversation.systemMessage.content = e.currentTarget.value)}
				class="absolute inset-x-0 bottom-0 h-full resize-none bg-transparent px-3 pt-10 text-sm outline-none"
			></textarea>
		</div>
	</div>
	<div class="relative divide-y divide-gray-200 dark:divide-gray-800" on:keydown={onKeydown}>
		<div
			class="flex h-[calc(100dvh-5rem-120px)] divide-x divide-gray-200 *:w-full md:h-[calc(100dvh-5rem)] md:pt-3 dark:divide-gray-800"
		>
			<PlaygroundConversation
				{loading}
				{conversation}
				{viewCode}
				{hfToken}
				on:addMessage={addMessage}
				on:deleteMessage={e => deleteMessage(e.detail)}
			/>
		</div>
		<div
			class="fixed inset-x-0 bottom-0 flex h-20 items-center gap-2 overflow-hidden whitespace-nowrap px-3 md:absolute"
		>
			<button
				type="button"
				on:click={() => (viewSettings = !viewSettings)}
				class="flex h-[39px] items-center gap-1 rounded-lg border border-gray-200 bg-white px-3 py-2.5 text-sm font-medium text-gray-900 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:outline-none focus:ring-4 focus:ring-gray-100 md:hidden dark:border-gray-600 dark:bg-gray-800 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white dark:focus:ring-gray-700"
			>
				<svg
					class="text-black dark:text-white"
					style=""
					xmlns="http://www.w3.org/2000/svg"
					xmlns:xlink="http://www.w3.org/1999/xlink"
					aria-hidden="true"
					focusable="false"
					role="img"
					width="1em"
					height="1em"
					preserveAspectRatio="xMidYMid meet"
					viewBox="0 0 24 24"
					><path
						fill="currentColor"
						d="M2.131 13.63a10 10 0 0 1 .001-3.26c1.101.026 2.092-.502 2.477-1.431c.385-.93.058-2.003-.74-2.763a10 10 0 0 1 2.306-2.307c.76.798 1.834 1.125 2.763.74c.93-.385 1.458-1.376 1.431-2.477a10 10 0 0 1 3.261 0c-.026 1.102.502 2.092 1.431 2.477c.93.385 2.003.058 2.763-.74a10 10 0 0 1 2.307 2.306c-.798.76-1.125 1.834-.74 2.764s1.376 1.458 2.477 1.43a10 10 0 0 1 0 3.262c-1.102-.027-2.092.501-2.477 1.43c-.385.93-.058 2.004.74 2.764a10 10 0 0 1-2.306 2.306c-.76-.798-1.834-1.125-2.764-.74s-1.458 1.376-1.43 2.478a10 10 0 0 1-3.262-.001c.027-1.101-.502-2.092-1.43-2.477c-.93-.385-2.004-.058-2.764.74a10 10 0 0 1-2.306-2.306c.798-.76 1.125-1.834.74-2.763c-.385-.93-1.376-1.458-2.478-1.431M12 15a3 3 0 1 0 0-6a3 3 0 0 0 0 6"
					/></svg
				>
				{!viewSettings ? "Settings" : "Hide Settings"}
			</button>
			<button
				type="button"
				on:click={reset}
				class="flex size-[39px] flex-none items-center justify-center rounded-lg border border-gray-200 bg-white text-sm font-medium text-gray-900 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:outline-none focus:ring-4 focus:ring-gray-100 dark:border-gray-600 dark:bg-gray-800 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white dark:focus:ring-gray-700"
			>
				<IconDelete />
			</button>
			<div class="flex-1 items-center justify-center text-center text-sm text-gray-500">
				<span class="max-xl:hidden">
					{generatedTokensCount} tokens · First Token: {firstTokenLatency}ms · Total Generation: {latency}ms
				</span>
			</div>
			<button
				type="button"
				on:click={() => (viewCode = !viewCode)}
				class="flex h-[39px] items-center gap-2 rounded-lg border border-gray-200 bg-white px-3 py-2.5 text-sm font-medium text-gray-900 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:outline-none focus:ring-4 focus:ring-gray-100 dark:border-gray-600 dark:bg-gray-800 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white dark:focus:ring-gray-700"
			>
				<IconCode />
				{!viewCode ? "View Code" : "Hide Code"}
			</button>
			<button
				on:click={() => {
					viewCode = false;
					loading ? abort() : submit();
				}}
				type="button"
				class="flex h-[39px] w-24 items-center justify-center gap-2 rounded-lg px-5 py-2.5 text-sm font-medium text-white focus:outline-none focus:ring-4 focus:ring-gray-300 dark:border-gray-700 dark:focus:ring-gray-700 {loading
					? 'bg-red-900 hover:bg-red-800 dark:bg-red-600 dark:hover:bg-red-700'
					: 'bg-black hover:bg-gray-900 dark:bg-blue-600 dark:hover:bg-blue-700'}"
			>
				{#if loading}
					<div class="flex flex-none items-center gap-[3px]">
						<span class="mr-2">
							{#if conversation.streaming}
								Stop
							{:else}
								Cancel
							{/if}
						</span>
						<div
							class="h-1 w-1 flex-none animate-bounce rounded-full bg-gray-500 dark:bg-gray-100"
							style="animation-delay: 0.25s;"
						/>
						<div
							class="h-1 w-1 flex-none animate-bounce rounded-full bg-gray-500 dark:bg-gray-100"
							style="animation-delay: 0.5s;"
						/>
						<div
							class="h-1 w-1 flex-none animate-bounce rounded-full bg-gray-500 dark:bg-gray-100"
							style="animation-delay: 0.75s;"
						/>
					</div>
				{:else}
					Run <span class="inline-flex gap-0.5 rounded border border-white/20 bg-white/10 px-0.5 text-xs text-white/70"
						><span class="translate-y-px"></span></span
					>
				{/if}
			</button>
		</div>
	</div>
	<div class="flex flex-col p-3 {viewSettings ? 'max-md:fixed' : 'max-md:hidden'} max-md:inset-x-0 max-md:bottom-20">
		<div
			class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-white bg-gradient-to-b from-white via-white p-3 shadow-sm dark:border-white/5 dark:bg-gray-900 dark:from-gray-800/40 dark:via-gray-800/40"
		>
			<div class="flex flex-col gap-2">
				<ModelSelector {models} bind:conversation />
				<div class="self-end text-xs">
					<a
						href="https://huggingface.co/{conversation.model.id}"
						target="_blank"
						class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:hover:text-gray-400"
					>
						<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" viewBox="0 0 32 32"
							><path fill="currentColor" d="M10 6v2h12.59L6 24.59L7.41 26L24 9.41V22h2V6H10z" /></svg
						>
						Model page
					</a>
				</div>
			</div>

			<GenerationConfig bind:conversation />
			<div class="mt-auto hidden">
				<div class="mb-3 flex items-center justify-between gap-2">
					<label for="default-range" class="block text-sm font-medium text-gray-900 dark:text-white">API Quota</label>
					<span
						class="rounded bg-gray-100 px-1.5 py-0.5 text-xs font-medium text-gray-800 dark:bg-gray-700 dark:text-gray-300"
						>Free</span
					>

					<div class="ml-auto w-12 text-right text-sm">76%</div>
				</div>
				<div class="h-2 w-full rounded-full bg-gray-200 dark:bg-gray-700">
					<div class="h-2 rounded-full bg-black dark:bg-gray-400" style="width: 75%"></div>
				</div>
			</div>
		</div>
	</div>
</div>

<a
	target="_blank"
	href="https://huggingface.co/spaces/huggingface/inference-playground/discussions/1"
	class="absolute bottom-6 left-4 flex items-center gap-1 text-sm text-gray-500 underline decoration-gray-300 hover:text-gray-800 max-md:hidden dark:text-gray-400 dark:decoration-gray-600 dark:hover:text-gray-200"
>
	<IconInfo classNames="text-xs" />
	Give feedback
</a>