Sarah Ciston committed
Commit 2a9ee6d · 1 Parent(s): 61c49f0

hyperparams, diff model

Files changed (2):
  1. README.md  +3 -2
  2. sketch.js  +10 -3
README.md CHANGED
@@ -7,8 +7,9 @@ sdk: static
 pinned: false
 models:
 # - Xenova/detr-resnet-50
-- Xenova/gpt2
-- Xenova/bloom-560m
+# - Xenova/gpt2
+# - Xenova/bloom-560m
+- Xenova/distilgpt2
 # - Xenova/llama-68m
 # - Xenova/LaMini-Flan-T5-783M
 # - mistralai/Mistral-7B-Instruct-v0.2
sketch.js CHANGED
@@ -2,8 +2,8 @@ import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers
 // import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm';
 // const inference = new HfInference();

-let pipe = await pipeline('text-generation', model='Xenova/bloom-560m', return_full_text=False);
-// models('Xenova/gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m')
+let pipe = await pipeline('text-generation', model='Xenova/distilgpt2');
+// models('Xenova/gpt2', 'mistralai/Mistral-7B-Instruct-v0.2', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m', 'Xenova/distilgpt2')
 // list of models by task: 'https://huggingface.co/docs/transformers.js/index#supported-tasksmodels'
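A note on this line (not part of the commit): `model='Xenova/distilgpt2'` is Python keyword-argument syntax; in an ES module this is an assignment to an undeclared `model` and will throw, and Transformers.js expects the model id as a plain second argument anyway. A minimal sketch of the idiomatic call, with the CDN URL left generic since the import at the top of sketch.js is truncated in the hunk header:

```js
// Minimal sketch, not part of the commit: pass the model id positionally.
// Adjust the CDN URL/version to match the import already used in sketch.js.
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

// 'text-generation' pipeline backed by the small distilgpt2 port
let pipe = await pipeline('text-generation', 'Xenova/distilgpt2');
```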
 
 
@@ -30,7 +30,12 @@ var PROMPT = `The [BLANK] works as a [FILL] but wishes for [FILL].`
 // let out = await pipe(PREPROMPT + PROMPT)
 let out = await pipe(PREPROMPT + PROMPT, {
   max_new_tokens: 250,
-  temperature: 0.9
+  temperature: 2,
+  return_full_text: False
+  repetition_penalty: 1.5,
+  no_repeat_ngram_size: 2,
+  num_beams: 2,
+  num_return_sequences: 2,
 });
 console.log(out)

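The added options will not parse as committed: JavaScript booleans are lowercase and the `return_full_text` line is missing a trailing comma. A hedged sketch of the same call with those syntax fixes, reusing `pipe`, `PREPROMPT`, and `PROMPT` from sketch.js; the `do_sample` flag is an assumption (temperature-style sampling options generally only take effect when sampling is enabled), and whether every option such as `return_full_text` is honoured depends on the Transformers.js version in use:

```js
// Sketch only, not part of the commit: same hyperparameters with valid JS syntax.
let out = await pipe(PREPROMPT + PROMPT, {
  max_new_tokens: 250,
  temperature: 2,           // very high temperature = very random output
  do_sample: true,          // assumption: enable sampling so temperature applies
  return_full_text: false,  // lowercase boolean, trailing comma added
  repetition_penalty: 1.5,
  no_repeat_ngram_size: 2,
  num_beams: 2,
  num_return_sequences: 2,
});
console.log(out);
```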
 
@@ -157,6 +162,8 @@ new p5(function(p5){

 function makeInterface(){
   console.log('got to make interface')
+  p5.createElement()
+
   promptInput = p5.createInput("")
   promptInput.position(0,160)
   promptInput.size(500);
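One more note on the interface change: p5's `createElement()` expects a tag name (and optional content), so the bare call added here does not yet create anything useful. A small sketch of what it might become, with the tag, label text, and position chosen purely as placeholder assumptions:

```js
// Sketch only: tag name, label text, and position are illustrative assumptions.
let title = p5.createElement('h3', 'Fill in the blanks to generate text');
title.position(0, 120); // hypothetical spot just above the prompt input at y = 160
```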