Sarah Ciston committed on
Commit
5ef1468
·
1 Parent(s): 546cd8a

change options syntax for model settings

Browse files
Files changed (2) hide show
  1. README.md +4 -3
  2. sketch.js +7 -2
README.md CHANGED
@@ -6,15 +6,16 @@ colorTo: blue
6
  sdk: static
7
  pinned: false
8
  models:
 
9
  # - meta-llama/Meta-Llama-3-70B-Instruct
10
  # - Xenova/detr-resnet-50
11
- - Xenova/gpt2
12
  # - Xenova/bloom-560m
13
- - Xenova/distilgpt2
14
  # - Xenova/gpt-3.5-turbo
15
  # - Xenova/llama-68m
16
  # - Xenova/LaMini-Flan-T5-783M
17
- - mistralai/Mistral-7B-Instruct-v0.2
18
  # - meta-llama/Meta-Llama-3-8B
19
 
20
  ---
 
6
  sdk: static
7
  pinned: false
8
  models:
9
+ - openai-community/gpt2
10
  # - meta-llama/Meta-Llama-3-70B-Instruct
11
  # - Xenova/detr-resnet-50
12
+ # - Xenova/gpt2
13
  # - Xenova/bloom-560m
14
+ # - Xenova/distilgpt2
15
  # - Xenova/gpt-3.5-turbo
16
  # - Xenova/llama-68m
17
  # - Xenova/LaMini-Flan-T5-783M
18
+ # - mistralai/Mistral-7B-Instruct-v0.2
19
  # - meta-llama/Meta-Llama-3-8B
20
 
21
  ---
sketch.js CHANGED
@@ -207,11 +207,16 @@ async function runModel(PREPROMPT, PROMPT){
207
  // return modelResult
208
 
209
  // pipeline/transformers version TEST
210
- let pipe = await pipeline('text-generation');
211
 
212
  // , 'meta-llama/Meta-Llama-3-70B-Instruct'
 
213
 
214
- out = pipe((PREPROMPT, PROMPT), return_full_text = false)
 
 
 
 
215
 
216
  console.log(out)
217
 
 
207
  // return modelResult
208
 
209
  // pipeline/transformers version TEST
210
+ let pipe = await pipeline('text-generation', 'openai-community/gpt2');
211
 
212
  // , 'meta-llama/Meta-Llama-3-70B-Instruct'
213
+ // , 'openai-community/gpt2'
214
 
215
+ out = await pipe((PREPROMPT, PROMPT), {
216
+ max_tokens: 150,
217
+ num_return_sequences: 2,
218
+ return_full_text: false
219
+ })
220
 
221
  console.log(out)
222