Sarah Ciston committed
Commit a767054 · 1 Parent(s): 2abce08

switch models, add consolelogs, result array[0]

Files changed (2):
  1. README.md +3 -1
  2. sketch.js +9 -8
README.md CHANGED
@@ -8,8 +8,10 @@ pinned: false
 models:
 # - Xenova/detr-resnet-50
 - Xenova/gpt2
+- Xenova/bloom-560m
+# - Xenova/llama-68m
 # - Xenova/LaMini-Flan-T5-783M
-- mistralai/Mistral-7B-Instruct-v0.2
+# - mistralai/Mistral-7B-Instruct-v0.2
 # - meta-llama/Meta-Llama-3-8B
 ---

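On Hugging Face Spaces, the models: list in the README's YAML front matter declares which Hub models the Space uses. After this commit the active entries are Xenova/gpt2 and Xenova/bloom-560m; a sketch of the resulting front matter, with any keys not visible in this hunk elided:

---
pinned: false
models:
# - Xenova/detr-resnet-50
- Xenova/gpt2
- Xenova/bloom-560m
# - Xenova/llama-68m
# - Xenova/LaMini-Flan-T5-783M
# - mistralai/Mistral-7B-Instruct-v0.2
# - meta-llama/Meta-Llama-3-8B
---
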
sketch.js CHANGED
@@ -2,8 +2,8 @@ import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers
 // import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm';
 // const inference = new HfInference();
 
-let pipe = await pipeline('text-generation');
-// models('Xenova/gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B')
+let pipe = await pipeline('text-generation', 'Xenova/bloom-560m');
+// models('Xenova/gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m')
 // list of models by task: 'https://huggingface.co/docs/transformers.js/index#supported-tasksmodels'
 
 
@@ -27,14 +27,15 @@ var PROMPT = `The [BLANK] works as a [FILL] but wishes for [FILL].`
 // max_tokens: 100
 // });
 
-let out = await pipe(PREPROMPT + PROMPT, {
-  max_new_tokens: 100,
-  temperature: 0.9
-});
+let out = await pipe(PREPROMPT + PROMPT)
+// let out = await pipe(PREPROMPT + PROMPT, {
+//   max_new_tokens: 150,
+//   temperature: 0.9
+// });
 console.log(out)
 
-var result = await out.generated_text
-// console.log('huggingface loaded');
+var result = await out[0].generated_text
+console.log(result);
 
 
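Why out[0]: in transformers.js, a text-generation pipeline resolves to an array of result objects shaped like [{ generated_text: '...' }], so the earlier out.generated_text was undefined. A minimal sketch of the post-commit flow, assuming a placeholder PREPROMPT (its value is not shown in this diff) and an unpinned import URL:

// Sketch of sketch.js after this commit; PREPROMPT's value and the exact
// CDN version pin are assumptions, everything else follows the diff.
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

var PREPROMPT = '...'; // placeholder: real value not shown in this diff
var PROMPT = `The [BLANK] works as a [FILL] but wishes for [FILL].`;

// Load text generation with an explicit model rather than the library default.
let pipe = await pipeline('text-generation', 'Xenova/bloom-560m');

// Options like max_new_tokens and temperature are optional; this commit
// calls the pipeline with defaults and leaves the options commented out.
let out = await pipe(PREPROMPT + PROMPT);
console.log(out);

// out is already awaited, so generated_text is a plain string; the extra
// await in the diff is harmless but unnecessary.
var result = out[0].generated_text;
console.log(result);

Top-level await requires this file to load as an ES module (e.g. a script tag with type="module").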