Sarah Ciston committed on
Commit
812da65
·
1 Parent(s): c30e160

switch back to default model

Browse files
Files changed (2) hide show
  1. README.md +1 -1
  2. sketch.js +16 -15
README.md CHANGED
@@ -6,13 +6,13 @@ colorTo: blue
6
  sdk: static
7
  pinned: false
8
  models:
 
9
  - HuggingFaceH4/zephyr-7b-beta
10
  # - openai-community/gpt2
11
  # - meta-llama/Meta-Llama-3-70B-Instruct
12
  # - Xenova/detr-resnet-50
13
  # - Xenova/gpt2
14
  # - Xenova/bloom-560m
15
- # - Xenova/distilgpt2
16
  # - Xenova/gpt-3.5-turbo
17
  # - Xenova/llama-68m
18
  # - Xenova/LaMini-Flan-T5-783M
 
6
  sdk: static
7
  pinned: false
8
  models:
9
+ - Xenova/distilgpt2
10
  - HuggingFaceH4/zephyr-7b-beta
11
  # - openai-community/gpt2
12
  # - meta-llama/Meta-Llama-3-70B-Instruct
13
  # - Xenova/detr-resnet-50
14
  # - Xenova/gpt2
15
  # - Xenova/bloom-560m
 
16
  # - Xenova/gpt-3.5-turbo
17
  # - Xenova/llama-68m
18
  # - Xenova/LaMini-Flan-T5-783M
sketch.js CHANGED
@@ -177,21 +177,19 @@ async function runModel(PREPROMPT, PROMPT){
177
  // num_return_sequences: 1 //must be 1 for greedy search
178
  // })
179
 
180
- let generator = pipeline("text-generation", "HuggingFaceH4/zephyr-7b-beta")
181
 
182
- // let MESSAGES = PREPROMPT + PROMPT
183
  // for zephyr customizing
184
- let MESSAGES = [
185
- {
186
- "role": "system",
187
- "content": PREPROMPT
188
- },{
189
- "role": "user",
190
- "content": PROMPT
191
- }
192
- ]
193
-
194
- let res = await generator(MESSAGES)
195
 
196
  // let res = await pipe(MESSAGES, {
197
  // max_new_tokens: 150,
@@ -199,11 +197,14 @@ async function runModel(PREPROMPT, PROMPT){
199
  // top_k: 50,
200
  // top_p: 0.95
201
  // });
 
 
 
202
 
203
  console.log(res)
204
 
205
- // var modelResult = await res[0].generated_text
206
- var modelResult = await res[0].generated_text[0].content
207
  console.log(modelResult)
208
 
209
  return modelResult
 
177
  // num_return_sequences: 1 //must be 1 for greedy search
178
  // })
179
 
180
+ // let generator = pipeline("text-generation", "HuggingFaceH4/zephyr-7b-beta")
181
 
182
+ let MESSAGES = PREPROMPT + PROMPT
183
  // for zephyr customizing
184
+ // let MESSAGES = [
185
+ // {
186
+ // "role": "system",
187
+ // "content": PREPROMPT
188
+ // },{
189
+ // "role": "user",
190
+ // "content": PROMPT
191
+ // }
192
+ // ]
 
 
193
 
194
  // let res = await pipe(MESSAGES, {
195
  // max_new_tokens: 150,
 
197
  // top_k: 50,
198
  // top_p: 0.95
199
  // });
200
+ let generator = pipeline('text-generation', 'Xenova/distilgpt2')
201
+
202
+ let res = await generator(MESSAGES)
203
 
204
  console.log(res)
205
 
206
+ var modelResult = await res[0].generated_text
207
+ // var modelResult = await res[0].generated_text[0].content
208
  console.log(modelResult)
209
 
210
  return modelResult