Spaces:
Build error
Build error
File size: 4,858 Bytes
1b66f8d 5da61b4 4a6603b 5da61b4 06ffd82 5da61b4 3e9f86e abaaa3e 9405a81 831f161 fe2328e 4a6603b 1b66f8d b56bba1 1b66f8d 831f161 2772555 831f161 1b66f8d 3aa8136 1b66f8d 4a6603b 833fd87 06ffd82 1b66f8d 06ffd82 1b66f8d 7f827cf 1b66f8d 3c650ed 7f827cf abaaa3e 5da61b4 1b66f8d 4a6603b 5da61b4 1b66f8d 3aa8136 2772555 1b66f8d 0abf663 9b20b88 0abf663 1b66f8d 91e621a 5da61b4 1b66f8d 4a6603b 1b66f8d 8811ee0 91e621a 1b66f8d 3e9f86e 831f161 3e9f86e 4a6603b 1b66f8d 2772555 1b66f8d 3aa8136 1b66f8d 4a6603b 91e621a 4a6603b 7482c01 3e9f86e 831f161 3e9f86e 34857c4 fe2328e 34857c4 1b66f8d 4a6603b 5da61b4 4a6603b 1b66f8d 4a6603b 1b66f8d b56bba1 1b66f8d b56bba1 9405a81 4a6603b 9405a81 2772555 9405a81 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 |
<script lang="ts">
import ChatWindow from "$lib/components/chat/ChatWindow.svelte";
import { pendingMessage } from "$lib/stores/pendingMessage";
import { pendingMessageIdToRetry } from "$lib/stores/pendingMessageIdToRetry";
import { onMount } from "svelte";
import { page } from "$app/stores";
import { textGenerationStream } from "@huggingface/inference";
import { invalidate } from "$app/navigation";
import { base } from "$app/paths";
import { PUBLIC_MAX_INPUT_TOKENS, PUBLIC_SEP_TOKEN } from "$env/static/public";
import { shareConversation } from "$lib/shareConversation";
import { UrlDependency } from "$lib/types/UrlDependency";
import { error } from "$lib/stores/errors";
import { randomUUID } from "$lib/utils/randomUuid";
// Route data from the page `load` function (messages, conversations, title).
export let data;
// Local working copy of the message list; updated in place as tokens stream in.
let messages = data.messages;
// Snapshot of the messages array we last received from the server,
// used to tell a genuine fresh load apart from a stale re-run.
let lastLoadedMessages = data.messages;
// Set when the user presses "stop"; checked by the streaming loop.
let isAborted = false;
// Since we modify the messages array locally, we don't want to reset it if an old version is passed
$: if (data.messages !== lastLoadedMessages) {
messages = data.messages;
lastLoadedMessages = data.messages;
}
// True while a message request/stream is in flight.
let loading = false;
// True from request start until the first streamed event arrives.
let pending = false;
/**
 * Streams the model response for `inputs`, appending/updating the trailing
 * assistant message as tokens arrive.
 *
 * Stops generation server-side (fire-and-forget POST) when the user either
 * navigates to another conversation mid-stream or presses "stop".
 */
async function getTextGenerationStream(inputs: string, messageId: string, isRetry = false) {
	// Capture the conversation id at call time so we can detect navigation
	// away from this conversation while the stream is still running.
	const conversationId = $page.params.id;

	const response = textGenerationStream(
		{
			model: $page.url.href,
			inputs,
			parameters: {
				// Taken from https://huggingface.co/spaces/huggingface/open-assistant-private-testing/blob/main/app.py#L54
				temperature: 0.9,
				top_p: 0.95,
				repetition_penalty: 1.2,
				top_k: 50,
				// @ts-ignore — `truncate` is not part of the published parameter typings
				truncate: parseInt(PUBLIC_MAX_INPUT_TOKENS, 10),
				watermark: false,
				max_new_tokens: 1024,
				stop: [PUBLIC_SEP_TOKEN],
				return_full_text: false,
			},
		},
		{
			id: messageId,
			is_retry: isRetry,
			use_cache: false,
		}
	);

	// Named `output` (not `data`) to avoid shadowing the component's `data` prop.
	for await (const output of response) {
		pending = false;

		if (!output) {
			break;
		}

		// The user navigated to a different conversation while streaming.
		if (conversationId !== $page.params.id) {
			fetch(`${base}/conversation/${conversationId}/stop-generating`, {
				method: "POST",
			}).catch(console.error);
			break;
		}

		// The user pressed "stop" in the UI.
		if (isAborted) {
			isAborted = false;
			fetch(`${base}/conversation/${conversationId}/stop-generating`, {
				method: "POST",
			}).catch(console.error);
			break;
		}

		// final message — replace the accumulated content with the full text
		if (output.generated_text) {
			const lastMessage = messages.at(-1);

			if (lastMessage) {
				lastMessage.content = output.generated_text;
				messages = [...messages];
			}
			break;
		}

		if (!output.token.special) {
			const lastMessage = messages.at(-1);

			if (lastMessage?.from !== "assistant") {
				// First token has a space at the beginning, trim it
				messages = [
					...messages,
					// id doesn't match the backend id but it's not important for assistant messages
					{ from: "assistant", content: output.token.text.trimStart(), id: randomUUID() },
				];
			} else {
				lastMessage.content += output.token.text;
				messages = [...messages];
			}
		}
	}
}
// Ask the backend to generate a title for the conversation with the given id.
async function summarizeTitle(id: string) {
	const endpoint = `${base}/conversation/${id}/summarize`;

	await fetch(endpoint, { method: "POST" });
}
/**
 * Appends the user message (or, on retry, truncates history back to the
 * retried message) and streams the assistant reply.
 *
 * After the first user message of a conversation, kicks off background title
 * summarization; otherwise just refreshes the conversation list.
 */
async function writeMessage(message: string, messageId = crypto.randomUUID()) {
	if (!message.trim()) return;

	try {
		isAborted = false;
		loading = true;
		pending = true;

		// On retry, drop the retried message and everything after it.
		let retryMessageIndex = messages.findIndex((msg) => msg.id === messageId);
		const isRetry = retryMessageIndex !== -1;
		if (!isRetry) {
			retryMessageIndex = messages.length;
		}

		messages = [
			...messages.slice(0, retryMessageIndex),
			{ from: "user", content: message, id: messageId },
		];

		await getTextGenerationStream(message, messageId, isRetry);

		// First exchange: summarize a title in the background, then refresh the list.
		if (messages.filter((m) => m.from === "user").length === 1) {
			summarizeTitle($page.params.id)
				.then(() => invalidate(UrlDependency.ConversationList))
				.catch(console.error);
		} else {
			await invalidate(UrlDependency.ConversationList);
		}
	} catch (err) {
		// TODO: Should prob check if this is really a TooManyRequests error
		$error = "Too much traffic, please try again.";
		console.error(err);
	} finally {
		loading = false;
		// Bug fix: also clear `pending` here — an error thrown before the first
		// streamed token previously left the UI stuck in the pending state.
		pending = false;
	}
}
// If the user submitted a message before this page mounted (e.g. from the
// home page), consume it from the stores and send it now.
onMount(async () => {
	const content = $pendingMessage;
	if (!content) {
		return;
	}

	// `|| undefined` (not `??`): an empty-string retry id must also be dropped.
	const retryId = $pendingMessageIdToRetry || undefined;

	// Clear the stores before sending so a re-mount doesn't resend.
	$pendingMessage = "";
	$pendingMessageIdToRetry = null;

	writeMessage(content, retryId);
});

// Prefer the (possibly summarized) title from the conversation list;
// fall back to the title supplied by the load function.
$: title = data.conversations.find((c) => c.id === $page.params.id)?.title ?? data.title;
</script>
<!-- Tab title follows the (possibly summarized) conversation title. -->
<svelte:head>
<title>{title}</title>
</svelte:head>
<!-- Main chat surface: renders the message list and forwards user actions
     (send, retry, share, stop) back to this component's handlers. -->
<ChatWindow
{loading}
{pending}
{messages}
on:message={(message) => writeMessage(message.detail)}
on:retry={(message) => writeMessage(message.detail.content, message.detail.id)}
on:share={() => shareConversation($page.params.id, data.title)}
on:stop={() => (isAborted = true)}
/>
|