nsarrazin and Mishig committed
Commit 29b7d2a (parent: a3a5e3c)

Update models and add check for assistants model on startup (#998)


* Remove old models from .env.template

* Make sure we always have a visible model in assistant settings

* Always show correct info in chat window

* Add migration that runs on every server start to update assistant model

* change key

* fix tests

* Update src/lib/migrations/migrations.ts

Co-authored-by: Mishig <[email protected]>

* review

* Update src/lib/components/chat/ChatWindow.svelte

Co-authored-by: Mishig <[email protected]>

* revert duplicate Model:

* simplify deprecated check

---------

Co-authored-by: Mishig <[email protected]>

.env.template CHANGED
@@ -60,35 +60,6 @@ MODELS=`[
       }
     },
     {
-      "name": "meta-llama/Llama-2-70b-chat-hf",
-      "description": "The latest and biggest model from Meta, fine-tuned for chat.",
-      "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
-      "websiteUrl": "https://ai.meta.com/llama/",
-      "preprompt": "",
-      "chatPromptTemplate" : "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
-      "promptExamples": [
-        {
-          "title": "Write an email from bullet list",
-          "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
-        }, {
-          "title": "Code a snake game",
-          "prompt": "Code a basic snake game in python, give explanations for each step."
-        }, {
-          "title": "Assist in a task",
-          "prompt": "How do I make a delicious lemon cheesecake?"
-        }
-      ],
-      "parameters": {
-        "temperature": 0.1,
-        "top_p": 0.95,
-        "repetition_penalty": 1.2,
-        "top_k": 50,
-        "truncate": 3072,
-        "max_new_tokens": 1024,
-        "stop" : ["</s>", "</s><s>[INST]"]
-      }
-    },
-    {
       "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
       "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
       "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
@@ -118,37 +89,6 @@ MODELS=`[
         "stop": ["<|im_end|>"]
       }
     },
-    {
-      "name": "codellama/CodeLlama-70b-Instruct-hf",
-      "displayName": "codellama/CodeLlama-70b-Instruct-hf",
-      "description": "Code Llama, a state of the art code model from Meta. Now in 70B!",
-      "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
-      "websiteUrl": "https://ai.meta.com/blog/code-llama-large-language-model-coding/",
-      "modelUrl": "https://huggingface.co/codellama/CodeLlama-70b-Instruct-hf",
-      "preprompt": "",
-      "chatPromptTemplate" : "<s>{{#if @root.preprompt}}Source: system\n\n {{@root.preprompt}} <step> {{/if}}{{#each messages}}{{#ifUser}}Source: user\n\n {{content}} <step> {{/ifUser}}{{#ifAssistant}}Source: assistant\n\n {{content}} <step> {{/ifAssistant}}{{/each}}Source: assistant\nDestination: user\n\n ",
-      "promptExamples": [
-        {
-          "title": "Fibonacci in Python",
-          "prompt": "Write a python function to calculate the nth fibonacci number."
-        }, {
-          "title": "JavaScript promises",
-          "prompt": "How can I wait for multiple JavaScript promises to fulfill before doing something with their values?"
-        }, {
-          "title": "Rust filesystem",
-          "prompt": "How can I load a file from disk in Rust?"
-        }
-      ],
-      "parameters": {
-        "temperature": 0.1,
-        "top_p": 0.95,
-        "repetition_penalty": 1.2,
-        "top_k": 50,
-        "truncate": 4096,
-        "max_new_tokens": 4096,
-        "stop": ["<step>", " <step>", " <step> "],
-      }
-    },
     {
       "name": "mistralai/Mistral-7B-Instruct-v0.1",
       "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
@@ -214,38 +154,6 @@ MODELS=`[
         }
       ]
     },
-    {
-      "name": "openchat/openchat-3.5-0106",
-      "displayName": "openchat/openchat-3.5-0106",
-      "description": "OpenChat 3.5 is the #1 model on MT-Bench, with only 7B parameters.",
-      "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/openchat-logo.png",
-      "websiteUrl": "https://huggingface.co/openchat/openchat-3.5-0106",
-      "modelUrl": "https://huggingface.co/openchat/openchat-3.5-0106",
-      "tokenizer": "openchat/openchat-3.5-0106",
-      "preprompt": "",
-      "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}GPT4 Correct User: {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<|end_of_turn|>GPT4 Correct Assistant:{{/ifUser}}{{#ifAssistant}}{{content}}<|end_of_turn|>{{/ifAssistant}}{{/each}}",
-      "parameters": {
-        "temperature": 0.6,
-        "top_p": 0.95,
-        "repetition_penalty": 1.2,
-        "top_k": 50,
-        "truncate": 6016,
-        "max_new_tokens": 2048,
-        "stop": ["<|end_of_turn|>"]
-      },
-      "promptExamples": [
-        {
-          "title": "Write an email from bullet list",
-          "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
-        }, {
-          "title": "Code a snake game",
-          "prompt": "Code a basic snake game in python, give explanations for each step."
-        }, {
-          "title": "Assist in a task",
-          "prompt": "How do I make a delicious lemon cheesecake?"
-        }
-      ]
-    }
 ]`
 
 OLD_MODELS=`[
@@ -256,7 +164,10 @@ OLD_MODELS=`[
   {"name":"openchat/openchat-3.5-1210"},
   {"name": "tiiuae/falcon-180B-chat"},
   {"name": "codellama/CodeLlama-34b-Instruct-hf"},
-  {"name": "google/gemma-7b-it"}
+  {"name": "google/gemma-7b-it"},
+  {"name": "meta-llama/Llama-2-70b-chat-hf"},
+  {"name": "codellama/CodeLlama-70b-Instruct-hf"},
+  {"name": "openchat/openchat-3.5-0106"}
 ]`
 
 TASK_MODEL='mistralai/Mistral-7B-Instruct-v0.1'
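
Moving an entry from MODELS to OLD_MODELS is what lets the server flag a conversation's model as deprecated instead of silently dropping it. chat-ui parses these backtick-quoted env lists at startup; the sketch below shows roughly how such a list can be turned into a typed array (the helper name and the use of zod + JSON5 here are assumptions for illustration, not the exact code in src/lib/server/models.ts):

import JSON5 from "json5";
import { z } from "zod";

// Hypothetical helper: parse the OLD_MODELS env value shown above into a typed list.
// The entries only need a "name"; anything else is ignored here.
const oldModelSchema = z.object({ name: z.string() });

export function parseOldModels(raw: string | undefined): { name: string }[] {
	// JSON5 tolerates the relaxed formatting used in .env.template; plain
	// JSON.parse would also work for the entries in this diff.
	return raw ? z.array(oldModelSchema).parse(JSON5.parse(raw)) : [];
}

// e.g. parseOldModels(process.env.OLD_MODELS).some((m) => m.name === "openchat/openchat-3.5-0106")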
src/lib/components/AssistantSettings.svelte CHANGED
@@ -17,6 +17,7 @@
 	import IconInternet from "./icons/IconInternet.svelte";
 	import TokensCounter from "./TokensCounter.svelte";
 	import HoverTooltip from "./HoverTooltip.svelte";
+	import { findCurrentModel } from "$lib/utils/models";
 
 	type ActionData = {
 		error: boolean;
@@ -45,11 +46,7 @@
 		const module = await import("browser-image-resizer");
 		compress = module.readAndCompressImage;
 
-		if (assistant) {
-			modelId = assistant.modelId;
-		} else {
-			modelId = models.find((model) => model.id === $settings.activeModel)?.id ?? models[0].id;
-		}
+		modelId = findCurrentModel(models, assistant ? assistant.modelId : $settings.activeModel).id;
 	});
 
 	let inputMessage1 = assistant?.exampleInputs[0] ?? "";
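
The onMount block now delegates to findCurrentModel from $lib/utils/models, which is what keeps the assistant settings pointed at a model that is still visible. The helper itself is not part of this diff, but the behaviour relied on here is roughly the following sketch (the exact signature is an assumption):

// Sketch of findCurrentModel: return the model matching `id` if it is still
// configured, otherwise fall back to the first model in the list (the default).
interface ModelLike {
	id: string;
	displayName?: string;
}

export function findCurrentModel<T extends ModelLike>(models: T[], id?: string): T {
	return models.find((m) => m.id === id) ?? models[0];
}

// Usage, as in the onMount above:
// modelId = findCurrentModel(models, assistant ? assistant.modelId : $settings.activeModel).id;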
src/lib/components/chat/ChatWindow.svelte CHANGED
@@ -309,7 +309,9 @@
 				<ChatInput value="Sorry, something went wrong. Please try again." disabled={true} />
 			{:else}
 				<ChatInput
-					placeholder="Ask anything"
+					placeholder={isReadOnly
+						? "This conversation is read-only. Start a new one to continue!"
+						: "Ask anything"}
 					bind:value={message}
 					on:submit={handleSubmit}
 					on:beforeinput={(ev) => {
@@ -353,16 +355,33 @@
 			<p>
 				Model:
 				{#if !assistant}
-					<a href="{base}/settings/{currentModel.id}" class="hover:underline"
-						>{currentModel.displayName}</a
-					>{:else}
-					{@const model = models.find((m) => m.id === assistant?.modelId)}
-					<a
-						href="{base}/settings/assistants/{assistant._id}"
-						class="inline-flex items-center border-b hover:text-gray-600 dark:border-gray-700 dark:hover:text-gray-300"
-						>{model?.displayName}<CarbonCaretDown class="text-xxs" /></a
-					>{/if} <span class="max-sm:hidden">·</span><br class="sm:hidden" /> Generated content may
-					be inaccurate or false.
+					{#if models.find((m) => m.id === currentModel.id)}
+						<a
+							href="{base}/settings/{currentModel.id}"
+							class="inline-flex items-center hover:underline"
+							>{currentModel.displayName}<CarbonCaretDown class="text-xxs" /></a
+						>
+					{:else}
+						<span class="inline-flex items-center line-through dark:border-gray-700">
+							{currentModel.id}
+						</span>
+					{/if}
+				{:else}
+					{@const model = models.find((m) => m.id === currentModel.id)}
+					{#if model}
+						<a
+							href="{base}/settings/assistants/{assistant._id}"
+							class="inline-flex items-center border-b hover:text-gray-600 dark:border-gray-700 dark:hover:text-gray-300"
+							>{model?.displayName}<CarbonCaretDown class="text-xxs" /></a
+						>
+					{:else}
+						<span class="inline-flex items-center line-through dark:border-gray-700">
+							{currentModel.id}
+						</span>
+					{/if}
+				{/if}
+				<span class="max-sm:hidden">·</span><br class="sm:hidden" /> Generated content may be inaccurate
+				or false.
 			</p>
 			{#if messages.length}
 				<button
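
Both branches of the template now perform the same lookup: if currentModel.id is no longer in models, the name is rendered struck-through instead of as a settings link. If that check grows any further it could be lifted into the script block as a reactive flag; a minimal sketch (the isModelAvailable name is hypothetical, not part of this commit):

// Svelte reactive statement: true only while the conversation's model is still
// present in the MODELS list (i.e. has not been moved to OLD_MODELS).
$: isModelAvailable = models.some((m) => m.id === currentModel.id);
// The template could then branch on {#if isModelAvailable} in both the
// assistant and non-assistant cases instead of repeating the find().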
src/lib/migrations/migrations.spec.ts CHANGED
@@ -3,7 +3,7 @@ import { migrations } from "./routines";
 import { acquireLock, isDBLocked, refreshLock, releaseLock } from "./lock";
 import { collections } from "$lib/server/database";
 
-const LOCK_KEY = "migrations";
+const LOCK_KEY = "migrations.test";
 
 describe("migrations", () => {
 	it("should not have duplicates guid", async () => {
@@ -21,7 +21,7 @@ describe("migrations", () => {
 		expect(locks.length).toBe(1);
 		expect(semaphores).toBeDefined();
 		expect(semaphores.length).toBe(1);
-		expect(semaphores?.[0].key).toBe("migrations");
+		expect(semaphores?.[0].key).toBe(LOCK_KEY);
 	});
 
 	it("should read the lock correctly", async () => {
src/lib/migrations/migrations.ts CHANGED
@@ -14,14 +14,6 @@ export async function checkAndRunMigrations() {
 	// check if all migrations have already been run
 	const migrationResults = await collections.migrationResults.find().toArray();
 
-	// if all the migrations._id are in the migrationResults, we can exit early
-	if (
-		migrations.every((m) => migrationResults.some((m2) => m2._id.toString() === m._id.toString()))
-	) {
-		console.log("[MIGRATIONS] All migrations already applied.");
-		return;
-	}
-
 	console.log("[MIGRATIONS] Begin check...");
 
 	// connect to the database
@@ -52,12 +44,12 @@
 	// iterate over all migrations
 	for (const migration of migrations) {
 		// check if the migration has already been applied
-		const existingMigrationResult = migrationResults.find(
-			(m) => m._id.toString() === migration._id.toString()
-		);
+		const shouldRun =
+			migration.runEveryTime ||
+			!migrationResults.find((m) => m._id.toString() === migration._id.toString());
 
 		// check if the migration has already been applied
-		if (existingMigrationResult) {
+		if (!shouldRun) {
 			console.log(`[MIGRATIONS] "${migration.name}" already applied. Skipping...`);
 		} else {
 			// check the modifiers to see if some cases match
@@ -71,8 +63,12 @@
 				continue;
 			}
 
-			// otherwise all is good and we cna run the migration
-			console.log(`[MIGRATIONS] "${migration.name}" not applied yet. Applying...`);
+			// otherwise all is good and we can run the migration
+			console.log(
+				`[MIGRATIONS] "${migration.name}" ${
+					migration.runEveryTime ? "should run every time" : "not applied yet"
+				}. Applying...`
+			);
 
 			await collections.migrationResults.updateOne(
 				{ _id: migration._id },
src/lib/migrations/routines/02-update-assistants-models.ts ADDED
@@ -0,0 +1,28 @@
+import type { Migration } from ".";
+import { getCollections } from "$lib/server/database";
+import { ObjectId } from "mongodb";
+
+const updateAssistantsModels: Migration = {
+	_id: new ObjectId("5f9f3f3f3f3f3f3f3f3f3f3f"),
+	name: "Update deprecated models in assistants with the default model",
+	up: async (client) => {
+		const models = (await import("$lib/server/models")).models;
+
+		const { assistants } = getCollections(client);
+
+		const modelIds = models.map((el) => el.id); // string[]
+		const defaultModelId = models[0].id;
+
+		// Find all assistants whose modelId is not in modelIds, and update it to use defaultModelId
+		await assistants.updateMany(
+			{ modelId: { $nin: modelIds } },
+			{ $set: { modelId: defaultModelId } }
+		);
+
+		return true;
+	},
+	runEveryTime: true,
+	runForHuggingChat: "only",
+};
+
+export default updateAssistantsModels;
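
With runEveryTime set, this updateMany sweep runs on every HuggingChat server start, so assistants pointing at freshly deprecated models (such as the three entries moved to OLD_MODELS above) are reassigned without needing a new migration id each time. To preview which assistants a deployment would touch, the same query can be run read-only with the MongoDB driver; the connection string and database name below are placeholders, not values from this commit:

import { MongoClient } from "mongodb";

// Dry-run: list assistants whose modelId is no longer configured.
// MONGODB_URL and the "chat-ui" database name are assumptions; adjust to your setup.
const client = new MongoClient(process.env.MONGODB_URL ?? "mongodb://localhost:27017");
const assistants = client.db("chat-ui").collection("assistants");

// Build this from your MODELS env, the same way the migration does.
const configuredIds = ["mistralai/Mistral-7B-Instruct-v0.1"];

const orphaned = await assistants
	.find({ modelId: { $nin: configuredIds } })
	.project({ name: 1, modelId: 1 })
	.toArray();

console.log(`${orphaned.length} assistant(s) would be moved to the default model`);
await client.close();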
src/lib/migrations/routines/index.ts CHANGED
@@ -1,6 +1,7 @@
 import type { MongoClient, ObjectId } from "mongodb";
 
 import updateSearchAssistant from "./01-update-search-assistants";
+import updateAssistantsModels from "./02-update-assistants-models";
 
 export interface Migration {
 	_id: ObjectId;
@@ -9,6 +10,7 @@ export interface Migration {
 	down?: (client: MongoClient) => Promise<boolean>;
 	runForFreshInstall?: "only" | "never"; // leave unspecified to run for both
 	runForHuggingChat?: "only" | "never"; // leave unspecified to run for both
+	runEveryTime?: boolean;
 }
 
-export const migrations: Migration[] = [updateSearchAssistant];
+export const migrations: Migration[] = [updateSearchAssistant, updateAssistantsModels];
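
Registering the routine here is all that is needed for it to take effect: checkAndRunMigrations (from migrations.ts above) walks the migrations array on boot and, thanks to the new runEveryTime flag, re-applies this one on every start. A minimal sketch of that startup wiring, assuming it is invoked from the SvelteKit server entry as chat-ui typically does (the file path is not part of this commit):

// src/hooks.server.ts (sketch): apply pending and runEveryTime migrations
// before the server begins handling requests.
import { checkAndRunMigrations } from "$lib/migrations/migrations";

await checkAndRunMigrations();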