// import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/[email protected]';
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm';
const inference = new HfInference();
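// Note: new HfInference() also accepts a Hugging Face access token as its first argument,
// e.g. new HfInference(HF_TOKEN); without one, requests run anonymously and some hosted
// models may be rate-limited or unavailable.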
// let pipe = await pipeline('text-generation', 'mistralai/Mistral-7B-Instruct-v0.2');
// models('Xenova/gpt2', 'Xenova/gpt-3.5-turbo', 'mistralai/Mistral-7B-Instruct-v0.2', 'Xenova/llama-68m', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m', 'Xenova/distilgpt2')
// list of models by task: 'https://huggingface.co/docs/transformers.js/index#supported-tasksmodels'
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
// env.allowLocalModels = false;
///////// VARIABLES
let promptResult, fillAResult, fillBResult, fillCResult, promptButton, buttonButton, promptInput, fillA, fillB, fillC, modelDisplay, modelResult
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
let MODELNAME = 'Xenova/gpt-3.5-turbo'
// models('Xenova/gpt2', 'Xenova/gpt-3.5-turbo', 'mistralai/Mistral-7B-Instruct-v0.2', 'Xenova/llama-68m', 'meta-llama/Meta-Llama-3-8B', 'Xenova/bloom-560m', 'Xenova/distilgpt2')
// fillArray must be defined before PREPROMPT: the template literal below is evaluated
// immediately, so referencing fillArray before it exists would insert "undefined"
var fillArray = ["mother", "father", "sister", "brother"]
var PREPROMPT = `Return an array of sentences. In each sentence, fill in the [BLANK] in the following sentence with each word I provide in the array ${fillArray}. Replace any [FILL] with an appropriate word of your choice.`
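// The text sent to the model is PREPROMPT + PROMPT, i.e. the pre-prompt above followed by
// the sentence typed into the prompt input (by default "The [BLANK] works as a [FILL] but wishes for [FILL].")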
///// p5 STUFF
new p5(function(p5){
p5.setup = function(){
console.log('p5 loaded')
p5.noCanvas()
makeInterface()
// let canvas = p5.createCanvas(200,200)
// canvas.position(300, 1000);
// p5.background(200)
// p5.textSize(20)
// p5.textAlign(p5.CENTER,p5.CENTER)
}
p5.draw = function(){
//
}
window.onload = function(){
console.log('sketchfile loaded')
}
function makeInterface(){
console.log('got to make interface')
let title = p5.createElement('h1', 'p5.js Critical AI Prompt Battle')
title.position(0,50)
promptInput = p5.createInput("")
promptInput.position(0,160)
promptInput.size(500);
promptInput.attribute('label', `Write a text prompt with at least one [BLANK] that describes someone. You can also write [FILL] where you want the bot to fill in a word.`)
promptInput.value(`The [BLANK] works as a [FILL] but wishes for [FILL].`)
promptInput.elt.style.fontSize = "15px";
p5.createP(promptInput.attribute('label')).position(0,100)
// p5.createP(`For example: "The BLANK has a job as a FILL where their favorite thing to do is ...`)
//make for loop to generate the fill inputs (see the makefill sketch further down)
//make a button to make another
//add them to the list of items
//three inputs for the words that will replace [BLANK]
//NOTE: fillAResult/fillBResult/fillCResult capture the empty starting values; read
//fillA.value() etc. again when the GO button is pressed to get what the user typed
fillA = p5.createInput("");
fillA.position(0, 240);
fillA.size(200);
fillA.elt.style.fontSize = "15px";
fillAResult = fillA.value()
fillB = p5.createInput("");
fillB.position(0, 270);
fillB.size(200);
fillB.elt.style.fontSize = "15px";
fillBResult = fillB.value()
fillC = p5.createInput("");
fillC.position(0, 300);
fillC.size(200);
fillC.elt.style.fontSize = "15px";
fillCResult = fillC.value()
// modelDisplay = p5.createElement("p", "Results:");
// modelDisplay.position(0, 380);
// // setTimeout(() => {
// modelDisplay.html(modelResult)
// // }, 2000);
//a model drop down list?
//GO BUTTON
promptButton = p5.createButton("GO");
promptButton.position(0, 340);
promptButton.elt.style.fontSize = "15px";
promptButton.mousePressed(test)
// describe(``)
// TO-DO alt-text description
}
function test(){
console.log('did test')
console.log(promptInput.value(), fillA.value(), fillB.value(), fillC.value())
}
// var modelResult = promptButton.mousePressed(runModel) = function(){
// // listens for the button to be clicked
// // run the prompt through the model here
// // modelResult = runModel()
// // return modelResult
// runModel()
// }
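// A possible wiring for the flow described above (a sketch, untested; showModelResult is a
// placeholder name): when GO is pressed, read the current prompt, run the model, and show
// the reply on the page. It relies on the runModel sketch near the end of this file; to try
// it, point the button at it with promptButton.mousePressed(showModelResult) instead of test.
// The fill inputs would also need to be folded into PREPROMPT before the request is sent.
async function showModelResult(){
PROMPT = promptInput.value()
let reply = await runModel()
modelDisplay = p5.createElement("p", reply)
modelDisplay.position(0, 380)
}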
// function makefill(i){
// i = p5.createInput("");
// i.position(0, 300); //append to last fill and move buttons down
// i.size(200);
// i.elt.style.fontSize = "15px";
// }
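// A sketch of the makefill idea above (untested; fillCount is a name introduced here for
// illustration, and nothing calls this yet): each call adds one more fill input below the
// existing ones and nudges the GO button down to make room.
let fillCount = 3 // fillA, fillB and fillC already occupy rows 240, 270 and 300
function makefill(){
let y = 240 + fillCount * 30
let input = p5.createInput("");
input.position(0, y); // append below the last fill input
input.size(200);
input.elt.style.fontSize = "15px";
promptButton.position(0, y + 40) // move the GO button down, as the to-do above suggests
fillCount += 1
return input
}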
});
///// MODEL STUFF
// var PROMPT = `The [BLANK] works as a [FILL] but wishes for [FILL].`
// /// this needs to run on button click, use string variables to fill in the form
var PROMPT = "" // filled in from promptInput when the GO button is pressed (the input does not exist until setup runs)
// fillArray is declared above, next to PREPROMPT
// // for num of fills put in list
// var fillArray = [`${fillAResult}`, `${fillBResult}`, `${fillCResult}`]
// async function runModel(){
// // Chat completion API
// const out = await inference.chatCompletion({
// model: MODELNAME,
// // model: "google/gemma-2-9b",
// messages: [{ role: "user", content: PREPROMPT + PROMPT }],
// max_tokens: 100
// });
// // let out = await pipe(PREPROMPT + PROMPT)
// // let out = await pipe(PREPROMPT + PROMPT, {
// // max_new_tokens: 250,
// // temperature: 0.9,
// // // return_full_text: False,
// // repetition_penalty: 1.5,
// // // no_repeat_ngram_size: 2,
// // // num_beams: 2,
// // num_return_sequences: 1
// // });
// console.log(out)
// var modelResult = await out.choices[0].message.content
// // var modelResult = await out[0].generated_text
// console.log(modelResult);
// return modelResult
// }
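// An active version of the chatCompletion call sketched above (untested here). Note that
// MODELNAME may not be available through the hosted Inference API; if the request is
// rejected, one of the hosted chat models listed in the comments at the top (e.g.
// 'mistralai/Mistral-7B-Instruct-v0.2') may work instead.
async function runModel(){
const out = await inference.chatCompletion({
model: MODELNAME,
messages: [{ role: "user", content: PREPROMPT + PROMPT }],
max_tokens: 100
});
console.log(out)
let modelResult = out.choices[0].message.content
console.log(modelResult);
return modelResult
}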
// Reference the elements that we will need
// const status = document.getElementById('status');
// const fileUpload = document.getElementById('upload');
// const imageContainer = document.getElementById('container');
// const example = document.getElementById('example');
// const EXAMPLE_URL = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/city-streets.jpg';
// Create a new object detection pipeline
// status.textContent = 'Loading model...';
// const detector = await pipeline('object-detection', 'Xenova/detr-resnet-50');
// status.textContent = 'Ready';
// example.addEventListener('click', (e) => {
// e.preventDefault();
// detect(EXAMPLE_URL);
// });
// fileUpload.addEventListener('change', function (e) {
// const file = e.target.files[0];
// if (!file) {
// return;
// }
// const reader = new FileReader();
// // Set up a callback when the file is loaded
// reader.onload = e2 => detect(e2.target.result);
// reader.readAsDataURL(file);
// });
// // Detect objects in the image
// async function detect(img) {
// imageContainer.innerHTML = '';
// imageContainer.style.backgroundImage = `url(${img})`;
// status.textContent = 'Analysing...';
// const output = await detector(img, {
// threshold: 0.5,
// percentage: true,
// });
// status.textContent = '';
// output.forEach(renderBox);
// }
// // Render a bounding box and label on the image
// function renderBox({ box, label }) {
// const { xmax, xmin, ymax, ymin } = box;
// // Generate a random color for the box
// const color = '#' + Math.floor(Math.random() * 0xFFFFFF).toString(16).padStart(6, 0);
// // Draw the box
// const boxElement = document.createElement('div');
// boxElement.className = 'bounding-box';
// Object.assign(boxElement.style, {
// borderColor: color,
// left: 100 * xmin + '%',
// top: 100 * ymin + '%',
// width: 100 * (xmax - xmin) + '%',
// height: 100 * (ymax - ymin) + '%',
// })
// // Draw label
// const labelElement = document.createElement('span');
// labelElement.textContent = label;
// labelElement.className = 'bounding-box-label';
// labelElement.style.backgroundColor = color;
// boxElement.appendChild(labelElement);
// imageContainer.appendChild(boxElement);
// }
// function setup(){
// let canvas = createCanvas(200,200)
// canvas.position(300, 1000);
// background(200)
// textSize(20)
// textAlign(CENTER,CENTER)
// console.log('p5 loaded')
// }
// function draw(){
// //
// }