<script lang="ts">
	import ChatWindow from "$lib/components/chat/ChatWindow.svelte";
	import { pendingMessage } from "$lib/stores/pendingMessage";
	import { onMount } from "svelte";
	import type { PageData } from "./$types";
	import { page } from "$app/stores";
	import { HfInference } from "@huggingface/inference";
	import { invalidate } from "$app/navigation";
	import { base } from "$app/paths";

	export let data: PageData;

	$: messages = data.messages;

	const hf = new HfInference();

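	// loading: a message round-trip is in flight; pending: still waiting for the first streamed token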
	let loading = false;
	let pending = false;

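	// Streams the model response for `inputs` from the endpoint behind the current page URL,
	// appending tokens to the trailing assistant message as they arrive.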
	async function getTextGenerationStream(inputs: string) {
		let conversationId = $page.params.id;

		const response = hf.endpoint($page.url.href).textGenerationStream(
			{
				inputs,
				parameters: {
					// Taken from https://huggingface.co/spaces/huggingface/open-assistant-private-testing/blob/main/app.py#L54
					temperature: 0.9,
					top_p: 0.95,
					repetition_penalty: 1.2,
					top_k: 50,
					// @ts-ignore
					truncate: 1024,
					watermark: false,
					max_new_tokens: 1024,
					stop: ["<|endoftext|>"],
					return_full_text: false,
				},
			},
			{
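				// Bypass the Inference API cache so identical prompts still produce a fresh generation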
				use_cache: false,
			}
		);

		for await (const data of response) {
			pending = false;

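			// Stop if the stream yields nothing or the user switched to another conversation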
			if (!data || conversationId !== $page.params.id) break;

			if (!data.token.special) {
				const lastMessage = messages.at(-1);

				if (lastMessage?.from !== "assistant") {
					// First token has a space at the beginning, trim it
					messages = [...messages, { from: "assistant", content: data.token.text.trimStart() }];
				} else {
					lastMessage.content += data.token.text;
					messages = [...messages];
				}
			}
		}
	}

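	// Asks the server to generate a title for the conversation, then refreshes the conversation list.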
	async function summarizeTitle(id: string) {
		const response = await fetch(`${base}/conversation/${id}/summarize`, {
			method: "POST",
		});
		if (response.ok) {
			// TODO: actually invalidate
			await invalidate("/");
			await invalidate((url) => url.pathname === "/" || url.pathname === base);
			location.reload();
		}
	}

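	// Appends the user message, streams the assistant reply, and kicks off title
	// summarization in the background after the first user message.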
	async function writeMessage(message: string) {
		if (!message.trim()) return;

		try {
			loading = true;
			pending = true;

			messages = [...messages, { from: "user", content: message }];

			await getTextGenerationStream(message);

			if (messages.filter((m) => m.from === "user").length === 1) {
				summarizeTitle($page.params.id).catch(console.error);
			}

			// Reload conversation order - doesn't seem to work
			// await invalidate('/');
		} finally {
			loading = false;
		}
	}

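	// If a message was stashed in the pendingMessage store (e.g. before this page was loaded),
	// send it as soon as the component mounts.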
	onMount(async () => {
		if ($pendingMessage) {
			const val = $pendingMessage;
			$pendingMessage = "";

			writeMessage(val);
		}
	});
</script>

<ChatWindow {loading} {pending} {messages} on:message={(message) => writeMessage(message.detail)} />