Sarah Ciston
committed on
Commit
·
546cd8a
1
Parent(s):
0ed7c61
remove unrecognized option in model settings
Browse files
sketch.js
CHANGED
@@ -16,14 +16,9 @@ env.allowLocalModels = false;
|
|
16 |
|
17 |
// establish global variables
|
18 |
|
19 |
-
let
|
20 |
-
|
21 |
-
|
22 |
-
submitButton,
|
23 |
-
addButton,
|
24 |
-
promptInput,
|
25 |
-
modelDisplay,
|
26 |
-
modelResult;
|
27 |
|
28 |
// pick a model (see list of models)
|
29 |
// INFERENCE MODELS
|
@@ -33,7 +28,6 @@ let PROMPT,
|
|
33 |
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
|
34 |
|
35 |
|
36 |
-
let blankArray = []
|
37 |
|
38 |
|
39 |
///// p5 STUFF
|
@@ -217,7 +211,7 @@ async function runModel(PREPROMPT, PROMPT){
|
|
217 |
|
218 |
// , 'meta-llama/Meta-Llama-3-70B-Instruct'
|
219 |
|
220 |
-
out = pipe((PREPROMPT, PROMPT),
|
221 |
|
222 |
console.log(out)
|
223 |
|
|
|
16 |
|
17 |
// establish global variables
|
18 |
|
19 |
+
let promptInput
|
20 |
+
|
21 |
+
let blankArray = []
|
|
|
|
|
|
|
|
|
|
|
22 |
|
23 |
// pick a model (see list of models)
|
24 |
// INFERENCE MODELS
|
|
|
28 |
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
|
29 |
|
30 |
|
|
|
31 |
|
32 |
|
33 |
///// p5 STUFF
|
|
|
211 |
|
212 |
// , 'meta-llama/Meta-Llama-3-70B-Instruct'
|
213 |
|
214 |
+
out = pipe((PREPROMPT, PROMPT), return_full_text = false)
|
215 |
|
216 |
console.log(out)
|
217 |
|