Create chatgpt.js
lib/chatgpt.js  +108 -0
lib/chatgpt.js
ADDED
@@ -0,0 +1,108 @@
import axios from "axios";
import { FormData, Blob } from "formdata-node";
import { fileTypeFromBuffer } from "file-type";

async function askOpenGPT4o(prompt, imgBuffer) {
  try {
    const BASE_URL = "https://kingnish-opengpt-4o.hf.space";
    // Random identifiers for the Gradio queue session and the file upload.
    const session_hash = Math.random().toString(36).substring(2);
    const upload_id = Math.random().toString(36).substring(2);

    if (imgBuffer) {
      // Detect the image type so the upload gets a matching filename and MIME type.
      const type = await fileTypeFromBuffer(imgBuffer);
      const filename = "images_" + Math.floor(1000 + Math.random() * 9000) + "." + type.ext;
      const form = new FormData();
      form.append("files", new Blob([imgBuffer], { type: type.mime }), filename);

      // Upload the image to the Space, then register prompt + file with the predict endpoint.
      const imgPathResponse = await axios({
        method: "POST",
        url: BASE_URL + "/upload?upload_id=" + upload_id,
        data: form,
        headers: { "Content-Type": "multipart/form-data" }
      });

      await axios({
        method: "POST",
        url: BASE_URL + "/run/predict?__theme=light",
        data: {
          data: [{
            text: prompt,
            files: [{
              path: imgPathResponse.data[0],
              meta: { _type: "gradio.FileData" },
              mime_type: type.mime,
              orig_name: filename,
              size: imgBuffer.length,
              url: BASE_URL + "/file=" + imgPathResponse.data[0]
            }]
          }],
          event_data: null,
          fn_index: 3,
          trigger_id: 34,
          session_hash
        }
      });
    } else {
      // Text-only prompt: register it without attachments.
      await axios({
        method: "POST",
        url: BASE_URL + "/run/predict?__theme=light",
        data: {
          data: [{ text: prompt, files: [] }],
          event_data: null,
          fn_index: 3,
          trigger_id: 34,
          session_hash
        }
      });
    }

    // Join the generation queue with the model name and sampling parameters
    // as sent by the Space's own UI.
    const res = await axios({
      method: "POST",
      url: BASE_URL + "/queue/join?__theme=light",
      data: {
        data: [
          null,
          null,
          "idefics2-8b-chatty",
          "Top P Sampling",
          0.5,
          4096,
          1,
          0.9,
          true
        ],
        event_data: null,
        fn_index: 5,
        trigger_id: 34,
        session_hash
      }
    });

    if (res.data.event_id) {
      // Read the server-sent event stream for this session and parse the result payloads.
      const stream = await axios({
        method: "GET",
        url: BASE_URL + "/queue/data?session_hash=" + session_hash
      });
      const lines = stream.data.split("\n");

      const completedLine = lines.find(line => line.includes("process_completed"));
      const generatingLine = lines.find(line => line.includes("process_generating"));
      const completed = completedLine ? JSON.parse(completedLine.replace("data: ", "")) : null;
      const generating = generatingLine ? JSON.parse(generatingLine.replace("data: ", "")) : null;

      // Prefer the completed event; fall back to the last "generating" event.
      if (completed?.success) {
        return completed.output.data[0][0][1] || completed.output.data[0][1][1];
      } else if (generating?.success) {
        return generating.output.data[0][0][1] || generating.output.data[0][1][1];
      }
    }
  } catch (error) {
    console.error("Error occurred:", error);
    return `Error: ${error.message}`;
  }
}

export { askOpenGPT4o }
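For reference, a minimal usage sketch (not part of this commit): it assumes the module above is saved as lib/chatgpt.js in an ESM project with axios, formdata-node, and file-type installed, and uses "./example.png" purely as a placeholder image path.

// usage-example.mjs (hypothetical file)
import { readFile } from "node:fs/promises";
import { askOpenGPT4o } from "./lib/chatgpt.js";

// Text-only prompt.
const answer = await askOpenGPT4o("What model are you running?");
console.log(answer);

// Prompt plus an image attachment ("./example.png" is a placeholder path).
const imgBuffer = await readFile("./example.png");
const caption = await askOpenGPT4o("Describe this image.", imgBuffer);
console.log(caption);

The function returns the assistant's reply text on success and a string starting with "Error:" if the request or parsing fails, so callers may want to check for that prefix.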