itaybar and nsarrazin committed
Commit d779ad0
1 Parent(s): 28c3bea

Added optional LLM summarization (#498)


* Added optional LLM summarization

* set summary as first 5 words

Co-authored-by: Nathan Sarrazin <[email protected]>

* delete unused file

* Pass stop sequences properly to TGI (#499)

* Pass stop sequences properly to TGI

* fix types

* Add a job in release action that updates the .env in prod (#493)

* Add a workflow that updates the .env in prod

* move update env to release workflow

* version bumps on actions

* flipped config

* black

* Save user message on request (#337) (#492)

Closes #337

* bump inference version

* lint

---------

Co-authored-by: Nathan Sarrazin <[email protected]>

Files changed (2)
  1. .env +1 -0
  2. src/lib/server/summarize.ts +4 -0
.env CHANGED
@@ -87,6 +87,7 @@ PUBLIC_APP_ASSETS=chatui # used to find logos & favicons in static/$PUBLIC_APP_ASSETS
 PUBLIC_APP_COLOR=blue # can be any of tailwind colors: https://tailwindcss.com/docs/customizing-colors#default-color-palette
 PUBLIC_APP_DATA_SHARING=#set to 1 to enable options & text regarding data sharing
 PUBLIC_APP_DISCLAIMER=#set to 1 to show a disclaimer on login page
+LLM_SUMMERIZATION=true
 
 # PUBLIC_APP_NAME=HuggingChat
 # PUBLIC_APP_ASSETS=huggingchat
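A note on how this flag behaves (a sketch, assuming SvelteKit's $env/static/private exposes the value as a plain string): the guard added in summarize.ts below only takes the first-five-words fallback when LLM_SUMMERIZATION is left empty in .env; any non-empty value, including "false", counts as enabled because non-empty strings are truthy in JavaScript.

import { LLM_SUMMERIZATION } from "$env/static/private";

// ""  (flag cleared in .env)        -> falsy  -> first-five-words fallback
// "true", "1", or even "false"      -> truthy -> LLM-based summarization
const useLlm = Boolean(LLM_SUMMERIZATION);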
src/lib/server/summarize.ts CHANGED
@@ -1,8 +1,12 @@
 import { buildPrompt } from "$lib/buildPrompt";
 import { generateFromDefaultEndpoint } from "$lib/server/generateFromDefaultEndpoint";
 import { defaultModel } from "$lib/server/models";
+import { LLM_SUMMERIZATION } from "$env/static/private";
 
 export async function summarize(prompt: string) {
+	if (!LLM_SUMMERIZATION) {
+		return prompt.split(/\s+/g).slice(0, 5).join(" ");
+	}
 	const userPrompt = `Please summarize the following message: \n` + prompt;
 
 	const summaryPrompt = await buildPrompt({
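For context, a minimal call-site sketch (the title-generation use and the example input are assumptions, not part of this diff): with the flag left empty, summarize returns the first five words of the message; with it set, the prompt goes through buildPrompt and generateFromDefaultEndpoint against the default model.

import { summarize } from "$lib/server/summarize";

// Hypothetical call site: derive a short title from the user's first message.
// With LLM_SUMMERIZATION empty this resolves to "What is the best way"
// (the first five words); with it set, the default model produces a summary.
const title = await summarize("What is the best way to learn Rust?");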