// Client for the OpenGPT-4o Hugging Face Space (https://kingnish-opengpt-4o.hf.space),
// a Gradio app: sends a text prompt, optionally with an image, and returns the model's reply.
import axios from "axios";
import { FormData, Blob } from "formdata-node";
import { fileTypeFromBuffer } from "file-type";

/**
 * Send a prompt (optionally with an image) to the OpenGPT-4o Space and return the reply.
 *
 * @param {string} prompt      The user prompt.
 * @param {Buffer} [imgBuffer] Optional image data, e.g. from fs.readFile.
 * @returns {Promise<string|undefined>} The model's reply, an error string, or undefined.
 */
async function askOpenGPT4o(prompt, imgBuffer) {
  try {
    const BASE_URL = "https://kingnish-opengpt-4o.hf.space";
    // Random session identifiers for the Gradio queue and the file upload.
    const session_hash = Math.random().toString(36).substring(2);
    const session_hash_2 = Math.random().toString(36).substring(2);
    
    if (imgBuffer) {
      // Detect the image type so the upload gets a proper filename and MIME type.
      const types = await fileTypeFromBuffer(imgBuffer);
      if (!types) throw new Error("Could not detect the image type from the buffer.");

      const filenames = "images_" + Math.floor(1000 + Math.random() * 9000) + "." + types.ext;
      const forms = new FormData();
      const blobs = new Blob([imgBuffer], { type: types.mime });

      forms.append("files", blobs, filenames);

      // Upload the image first; the Space replies with its server-side file path.
      const img_path_response = await axios({
        method: "POST",
        url: BASE_URL + "/upload?upload_id=" + session_hash_2,
        data: forms,
        headers: {
          "Content-Type": "multipart/form-data",
        }
      });

      // Register the prompt and the uploaded image as a chat message (fn_index 3).
      await axios({
        method: "POST",
        url: BASE_URL + "/run/predict?__theme=light",
        data: {
          data: [{
            text: prompt,
            files: [{
              path: img_path_response.data[0],
              meta: { _type: "gradio.FileData" },
              mime_type: types.mime,
              orig_name: filenames,
              size: imgBuffer.length,
              url: BASE_URL + "/file=" + img_path_response.data[0]
            }]
          }],
          event_data: null,
          fn_index: 3,
          trigger_id: 34,
          session_hash,
        }
      });
    } else {
      // Text-only prompt: register the chat message without any files.
      await axios({
        method: "POST",
        url: BASE_URL + "/run/predict?__theme=light",
        data: {
          data: [{ text: prompt, files: [] }],
          event_data: null,
          fn_index: 3,
          trigger_id: 34,
          session_hash,
        }
      });
    }

    // Queue the actual generation request (fn_index 5) with the model and sampling settings.
    const res = await axios({
      method: "POST",
      url: BASE_URL + "/queue/join?__theme=light",
      data: {
        data: [
          null,
          null,
          'idefics2-8b-chatty',
          'Top P Sampling',
          0.5,
          4096,
          1,
          0.9,
          true,
        ],
        event_data: null,
        fn_index: 5,
        trigger_id: 34,
        session_hash,
      }
    });

    if (res.data.event_id) {
      // Read the server-sent event stream for this session and pick out the result events.
      const streamRes = await axios({
        method: "GET",
        url: BASE_URL + "/queue/data?session_hash=" + session_hash
      });
      const lines = streamRes.data.split('\n');

      const completedLine = lines.find(line => line.includes('process_completed'));
      const completedData = completedLine ? JSON.parse(completedLine.replace('data: ', '')) : null;

      const generatingLine = lines.find(line => line.includes('process_generating'));
      const generatingData = generatingLine ? JSON.parse(generatingLine.replace('data: ', '')) : null;

      // Prefer the final "process_completed" payload; fall back to the last
      // "process_generating" payload if the stream ended before completion.
      if (completedData?.success) {
        return completedData.output.data[0][0][1] || completedData.output.data[0][1][1];
      } else if (generatingData?.success) {
        return generatingData.output.data[0][0][1] || generatingData.output.data[0][1][1];
      }
    }
  } catch (error) {
    console.error('Error occurred:', error);
    return `Error: ${error.message}`;
  }
}

export { askOpenGPT4o };
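
/*
 * Example usage (a sketch, not part of the original module). Assumes Node.js 18+,
 * the packages imported above installed, and that this file is saved as
 * "opengpt4o.js" (a hypothetical name; adjust the import path to your project):
 *
 *   import { readFile } from "node:fs/promises";
 *   import { askOpenGPT4o } from "./opengpt4o.js";
 *
 *   // Text-only prompt
 *   console.log(await askOpenGPT4o("Explain what this Space does in one sentence."));
 *
 *   // Prompt about an image
 *   const img = await readFile("./photo.jpg");
 *   console.log(await askOpenGPT4o("What is in this picture?", img));
 */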