Sarah Ciston committed on
Commit 877944c
1 Parent(s): 630ac98

default model, set hyperparams

Files changed (2)
  1. README.md +2 -2
  2. sketch.js +11 -8
README.md CHANGED
@@ -11,8 +11,8 @@ models:
   # - Xenova/detr-resnet-50
   # - Xenova/gpt2
   # - Xenova/bloom-560m
-  # - Xenova/distilgpt2
-  - Xenova/gpt-3.5-turbo
+  - Xenova/distilgpt2
+  # - Xenova/gpt-3.5-turbo
   # - Xenova/llama-68m
   # - Xenova/LaMini-Flan-T5-783M
   # - mistralai/Mistral-7B-Instruct-v0.2
sketch.js CHANGED
@@ -175,17 +175,20 @@ async function runModel(PREPROMPT, PROMPT){
   // // Chat completion API

   // pipeline/transformers version TEST
-  let pipe = await pipeline('text-generation', 'Xenova/gpt-3.5-turbo');
+  let pipe = await pipeline('text-generation', 'Xenova/distilgpt2');

-  // , 'meta-llama/Meta-Llama-3-70B-Instruct'
-  // , 'openai-community/gpt2'
+  // 'meta-llama/Meta-Llama-3-70B-Instruct'
+  // 'openai-community/gpt2'
+  // 'Xenova/gpt-3.5-turbo'

-  // out = await pipe((PREPROMPT, PROMPT), {
-  //   max_tokens: 150,
-  //   return_full_text: false
-  // })
+  out = await pipe((PREPROMPT, PROMPT), {
+    max_tokens: 250,
+    return_full_text: false,
+    repetition_penalty: 1.5,
+    num_return_sequences: 2
+  })

-  out = await pipe((PREPROMPT, PROMPT))
+  // out = await pipe((PREPROMPT, PROMPT))

   console.log(out)
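For reference, a minimal sketch (not part of this commit) of how the updated pipeline call could be written with Transformers.js. Two caveats about the diff above: `(PREPROMPT, PROMPT)` uses the JavaScript comma operator, so only `PROMPT` actually reaches the pipeline, and Transformers.js generation options use `max_new_tokens` rather than `max_tokens`. The `@xenova/transformers` import and the prompt concatenation below are assumptions about the intended behavior, not the project's actual code.

```js
// Sketch only: assumes the @xenova/transformers package and that PREPROMPT
// and PROMPT are meant to be sent together as one prompt string.
import { pipeline } from '@xenova/transformers';

async function runModel(PREPROMPT, PROMPT) {
  // Load a small text-generation model that can run client-side.
  const pipe = await pipeline('text-generation', 'Xenova/distilgpt2');

  // Concatenate the two strings; pipe((PREPROMPT, PROMPT), ...) would pass
  // only PROMPT because of the comma operator.
  const out = await pipe(`${PREPROMPT}\n${PROMPT}`, {
    max_new_tokens: 250,      // Transformers.js option (not max_tokens)
    do_sample: true,          // sampling is needed for distinct multiple returns
    repetition_penalty: 1.5,  // discourage repeated phrases
    num_return_sequences: 2   // return two candidate completions
  });

  console.log(out); // array of { generated_text: ... } objects
  return out;
}
```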