Sarah Ciston committed · Commit 0ed7c61 · Parent(s): 9a92806

try with default model?
README.md CHANGED
@@ -6,15 +6,15 @@ colorTo: blue
 sdk: static
 pinned: false
 models:
-- meta-llama/Meta-Llama-3-70B-Instruct
+# - meta-llama/Meta-Llama-3-70B-Instruct
 # - Xenova/detr-resnet-50
-
+- Xenova/gpt2
 # - Xenova/bloom-560m
-
+- Xenova/distilgpt2
 # - Xenova/gpt-3.5-turbo
 # - Xenova/llama-68m
 # - Xenova/LaMini-Flan-T5-783M
-
+- mistralai/Mistral-7B-Instruct-v0.2
 # - meta-llama/Meta-Llama-3-8B
 
 ---
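The "models:" list in a Space README is Hub metadata that links the Space to those model pages; the sketch still has to load a model itself. As a hedged illustration (not code from this repo), loading one of the newly listed ONNX models with transformers.js might look like the following, where only the model ID comes from the README above and the prompt and option values are placeholders:

// Minimal sketch, assuming an ES module context (top-level await).
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

// 'Xenova/distilgpt2' is one of the models added to the README's list.
const generator = await pipeline('text-generation', 'Xenova/distilgpt2');

const output = await generator('Once upon a time', { max_new_tokens: 30 });
console.log(output); // e.g. [{ generated_text: 'Once upon a time ...' }]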
sketch.js
CHANGED
@@ -10,7 +10,7 @@ import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers
|
|
10 |
|
11 |
|
12 |
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
|
13 |
-
|
14 |
|
15 |
///////// VARIABLES
|
16 |
|
@@ -213,7 +213,9 @@ async function runModel(PREPROMPT, PROMPT){
|
|
213 |
// return modelResult
|
214 |
|
215 |
// pipeline/transformers version TEST
|
216 |
-
let pipe = await pipeline('text-generation'
|
|
|
|
|
217 |
|
218 |
out = pipe((PREPROMPT, PROMPT), num_return_sequences=3, return_full_text=false)
|
219 |
|
|
|
10 |
|
11 |
|
12 |
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
|
13 |
+
env.allowLocalModels = false;
|
14 |
|
15 |
///////// VARIABLES
|
16 |
|
|
|
213 |
// return modelResult
|
214 |
|
215 |
// pipeline/transformers version TEST
|
216 |
+
let pipe = await pipeline('text-generation');
|
217 |
+
|
218 |
+
// , 'meta-llama/Meta-Llama-3-70B-Instruct'
|
219 |
|
220 |
out = pipe((PREPROMPT, PROMPT), num_return_sequences=3, return_full_text=false)
|
221 |
|
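For reference, a hedged sketch of how this call might be written in transformers.js, not the project's actual implementation: the unchanged line out = pipe((PREPROMPT, PROMPT), num_return_sequences=3, return_full_text=false) uses Python-style keyword arguments (in JavaScript these are plain assignments, and the comma operator passes only PROMPT), and the pipe call itself returns a promise that needs to be awaited. In transformers.js the input is a single string (or array of strings) and generation options go in one object. The prompt strings and option values below are placeholders, not values from this commit:

// Sketch under assumptions; PREPROMPT/PROMPT stand in for the variables defined in sketch.js.
import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

// Skip the local model check, as in this commit.
env.allowLocalModels = false;

// No model ID: the task's default text-generation model is used ("try with default model?").
const pipe = await pipeline('text-generation');

const PREPROMPT = 'You are a helpful assistant.'; // placeholder
const PROMPT = 'Say hello to the reader.';        // placeholder

// One combined input string; generation options go in a single object.
const out = await pipe(`${PREPROMPT}\n${PROMPT}`, {
  max_new_tokens: 100,     // illustrative value
  num_return_sequences: 3, // mirrors the Python option; support may depend on the transformers.js version
});
console.log(out); // e.g. [{ generated_text: '...' }]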