Commit 06ffd82 (1 parent: f91689a) by coyotte508 (HF staff)

⬆️ @huggingface/inference v2.1.1 (#83)

package-lock.json CHANGED
@@ -8,7 +8,7 @@
       "name": "chat-ui",
       "version": "0.0.1",
       "dependencies": {
-        "@huggingface/inference": "^2.0.0-rc2",
+        "@huggingface/inference": "^2.1.2",
         "autoprefixer": "^10.4.14",
         "date-fns": "^2.29.3",
         "dotenv": "^16.0.3",
@@ -473,9 +473,9 @@
       }
     },
     "node_modules/@huggingface/inference": {
-      "version": "2.0.0-rc2",
-      "resolved": "https://registry.npmjs.org/@huggingface/inference/-/inference-2.0.0-rc2.tgz",
-      "integrity": "sha512-jCU+zl1fmbmaWbTa3P2KC1GQMwjMc9ZLcR8Nq5UeFmDbmdz0GiBHsK7F1hTHRAPeF1K3X5L3V3OQUNZ6nzEu/w==",
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/@huggingface/inference/-/inference-2.1.2.tgz",
+      "integrity": "sha512-Qb+yVq13a1FVOLaQK0GOwDfp95VfIWP7uR83tAyluSkvpwAHaByVKw4zNGwMl3u3FxoDRn6snNGo54wTWApPzw==",
       "engines": {
         "node": ">=18"
       }
package.json CHANGED
@@ -33,7 +33,7 @@
   },
   "type": "module",
   "dependencies": {
-    "@huggingface/inference": "^2.0.0-rc2",
+    "@huggingface/inference": "^2.1.2",
     "autoprefixer": "^10.4.14",
     "date-fns": "^2.29.3",
     "dotenv": "^16.0.3",
src/routes/conversation/[id]/+page.svelte CHANGED
@@ -4,7 +4,7 @@
   import { onMount } from "svelte";
   import type { PageData } from "./$types";
   import { page } from "$app/stores";
-  import { HfInference } from "@huggingface/inference";
+  import { textGenerationStream } from "@huggingface/inference";
   import { invalidate } from "$app/navigation";
   import { base } from "$app/paths";
   import { trimSuffix } from "$lib/utils/trimSuffix";
@@ -24,16 +24,15 @@
     lastLoadedMessages = data.messages;
   }
 
-  const hf = new HfInference();
-
   let loading = false;
   let pending = false;
 
   async function getTextGenerationStream(inputs: string) {
     let conversationId = $page.params.id;
 
-    const response = hf.endpoint($page.url.href).textGenerationStream(
+    const response = textGenerationStream(
       {
+        model: $page.url.href,
         inputs,
         parameters: {
           // Taken from https://huggingface.co/spaces/huggingface/open-assistant-private-testing/blob/main/app.py#L54
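
The page component now calls the standalone textGenerationStream helper instead of going through an HfInference instance, passing the conversation URL ($page.url.href) as the model so that requests are presumably proxied to the real inference endpoint server side. As a minimal sketch (not part of the commit), this is roughly how the async stream returned by textGenerationStream can be consumed; the endpoint URL below is a placeholder, and the token fields follow the library's TextGenerationStreamOutput shape:

import { textGenerationStream } from "@huggingface/inference";

// Sketch only: accumulate the streamed tokens into a single string.
async function streamCompletion(inputs: string): Promise<string> {
  let text = "";
  for await (const output of textGenerationStream({
    model: "https://example.com/my-endpoint", // placeholder endpoint URL
    inputs,
    parameters: { max_new_tokens: 256 },
  })) {
    // Each chunk carries one newly generated token; special tokens
    // (e.g. <|endoftext|>) are flagged and skipped here for display.
    if (!output.token.special) {
      text += output.token.text;
    }
  }
  return text;
}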
src/routes/conversation/[id]/summarize/+server.ts CHANGED
@@ -2,6 +2,7 @@ import { HF_TOKEN } from "$env/static/private";
 import { PUBLIC_MODEL_ENDPOINT } from "$env/static/public";
 import { buildPrompt } from "$lib/buildPrompt";
 import { collections } from "$lib/server/database.js";
+import { textGeneration } from "@huggingface/inference";
 import { error } from "@sveltejs/kit";
 import { ObjectId } from "mongodb";
 
@@ -25,54 +26,52 @@ export async function POST({ params, locals, fetch }) {
 
   const prompt = buildPrompt([{ from: "user", content: userPrompt }]);
 
-  const resp = await fetch(PUBLIC_MODEL_ENDPOINT, {
-    headers: {
-      "Content-Type": "application/json",
-      Authorization: `Basic ${HF_TOKEN}`,
-    },
-    method: "POST",
-    body: JSON.stringify({
-      inputs: prompt,
-      parameters: {
-        temperature: 0.9,
-        top_p: 0.95,
-        repetition_penalty: 1.2,
-        top_k: 50,
-        watermark: false,
-        max_new_tokens: 1024,
-        stop: ["<|endoftext|>"],
-        return_full_text: false,
-      },
-    }),
-  });
+  const parameters = {
+    temperature: 0.9,
+    top_p: 0.95,
+    repetition_penalty: 1.2,
+    top_k: 50,
+    watermark: false,
+    max_new_tokens: 1024,
+    stop: ["<|endoftext|>"],
+    return_full_text: false,
+  };
 
-  const response = await resp.json();
-  let generatedTitle: string | undefined;
-  try {
-    if (typeof response[0].generated_text === "string") {
-      generatedTitle = response[0].generated_text;
+  const { generated_text } = await textGeneration(
+    {
+      model: PUBLIC_MODEL_ENDPOINT,
+      inputs: prompt,
+      parameters,
+    },
+    {
+      fetch: (url, options) =>
+        fetch(url, {
+          ...options,
+          headers: {
+            ...options?.headers,
+            Authorization: `Basic ${HF_TOKEN}`,
+          },
+        }),
     }
-  } catch {
-    console.error("summarization failed");
-  }
+  );
 
-  if (generatedTitle) {
+  if (generated_text) {
     await collections.conversations.updateOne(
       {
         _id: convId,
         sessionId: locals.sessionId,
       },
       {
-        $set: { title: generatedTitle },
+        $set: { title: generated_text },
       }
     );
   }
 
   return new Response(
     JSON.stringify(
-      generatedTitle
+      generated_text
         ? {
-            title: generatedTitle,
+            title: generated_text,
           }
         : {}
     ),
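
The summarize route now delegates the HTTP call to textGeneration and uses the library's fetch option to keep routing requests through SvelteKit's server-side fetch while injecting the Basic ${HF_TOKEN} authorization header. A minimal sketch (not part of the commit) of factoring that wrapper into a reusable helper; the helper and function names here are hypothetical:

import { textGeneration } from "@huggingface/inference";

// Hypothetical helper: wrap a fetch implementation so every request carries
// the Basic auth header the protected endpoint expects.
function withBasicAuth(token: string, baseFetch: typeof fetch): typeof fetch {
  return (url, options) =>
    baseFetch(url, {
      ...options,
      headers: { ...options?.headers, Authorization: `Basic ${token}` },
    });
}

// Hypothetical usage mirroring the updated route; in the app the endpoint and
// token come from the $env modules.
async function summarize(prompt: string, endpoint: string, token: string) {
  const { generated_text } = await textGeneration(
    {
      model: endpoint,
      inputs: prompt,
      parameters: { max_new_tokens: 1024, return_full_text: false },
    },
    { fetch: withBasicAuth(token, fetch) }
  );
  return generated_text;
}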