Update index.js
index.js
CHANGED
@@ -13,9 +13,10 @@ app.use(express.json({ limit: "500mb" }));
 app.use(express.urlencoded({ limit: '500mb', extended: true }));
 app.use((req, res, next) => {
     next()
-})
+});
 
-const apikey = "@SadTeam77"
+const apikey = "@SadTeam77";
+process.env.GRADIO_CLIENT_DEBUG = 'true';
 
 app.all('/', (req, res) => {
     const status = {}
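Note: besides adding the missing semicolons, this hunk moves process.env.GRADIO_CLIENT_DEBUG = 'true' to module scope (its removal from inside processImage2Img appears in a later hunk below), so the @gradio/client debug flag is set once, before any connection is opened, rather than on every img2img call.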
@@ -72,6 +73,27 @@ app.post('/api/img2img', async (req, res) => {
     }
 })
 
+app.post('/api/chatai/gpt4', async (req, res) => {
+    try {
+        console.log(req.body)
+        const { prompt, status } = req.body
+        if (!images) return res.json({ success: false, message: 'Required an images!' })
+        if (!prompt) return res.json({ succese: false, message: 'Require an Promot text!'})
+
+        if(status !== apikey) return res.json({ success: false, message: 'Invalid status!' })
+        const response = await chatgpt4(prompt);
+
+        res.json({
+            status: true,
+            result: response
+        });
+    } catch (e) {
+        console.log(e)
+        e = String(e)
+        res.json({ error: true, message: e === '[object Object]' ? 'Internal Server Error' : e })
+    }
+})
+
 const PORT = process.env.PORT || 7860
 app.listen(PORT, () => {
     console.log('App running on port', PORT)
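Note: as committed, the new /api/chatai/gpt4 handler checks !images, but no images variable is declared or destructured anywhere in this route, so every request throws a ReferenceError that the catch block then returns as the error message. That check (along with the "succese" key and the 'Require an Promot text!' message) looks copy-pasted from the img2img route above. A minimal corrected sketch of the validation block, assuming only prompt and status are actually required here:

        // Hypothetical fix, not part of the commit: drop the stray "images"
        // check and validate only the fields this route actually reads.
        const { prompt, status } = req.body
        if (!prompt) return res.json({ success: false, message: 'A prompt text is required!' })
        if (status !== apikey) return res.json({ success: false, message: 'Invalid status!' })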
@@ -93,8 +115,6 @@ async function processImage2Img(imgBuffer, prompt) {
         try {
             const imageArray = Buffer.from(imgBuffer);
 
-            process.env.GRADIO_CLIENT_DEBUG = 'true';
-
             const app = await Client.connect("Manjushri/SDXL-Turbo-Img2Img-CPU");
             const result = await app.predict("/predict", [
                 imageArray, // binary input for the image
@@ -108,4 +128,23 @@ async function processImage2Img(imgBuffer, prompt) {
             reject(e.message);
         }
     });
+}
+
+async function chatgpt4(prompt) {
+    try {
+        const client = await Client.connect("KingNish/OpenGPT-4o");
+        const result = await client.predict("/chat", {
+            user_prompt: {"text": prompt, "files": []},
+            model_selector: "idefics2-8b-chatty",
+            decoding_strategy: "Greedy",
+            temperature: 0,
+            max_new_tokens: 2048,
+            repetition_penalty: 0.01,
+            top_p: 0.01,
+            web_search: true,
+        });
+        return result.data;
+    } catch(e) {
+        return e.message;
+    }
 }
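One more observation on the chatgpt4 helper added in the hunk above: its catch block returns e.message, so a failed Space call comes back to the route as a normal return value and is sent to the client with status: true, as if it were model output. A sketch of a variant that lets the error propagate to the route's own catch block instead, using the same parameters as the commit:

// Hypothetical variant, not part of the commit: no try/catch here, so a
// failed connect or predict rejects and is handled by the route's catch
// block instead of being returned as if it were a model response.
async function chatgpt4(prompt) {
    const client = await Client.connect("KingNish/OpenGPT-4o");
    const result = await client.predict("/chat", {
        user_prompt: { "text": prompt, "files": [] },
        model_selector: "idefics2-8b-chatty",
        decoding_strategy: "Greedy",
        temperature: 0,
        max_new_tokens: 2048,
        repetition_penalty: 0.01,
        top_p: 0.01,
        web_search: true,
    });
    return result.data; // errors now reach the caller's try/catch
}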
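For reference, a call against the new route might look like the sketch below. The status field carries the API key from the diff; port 7860 is the server's default from the diff, while localhost is an assumption for local testing.

// Hypothetical client call for the new endpoint (assumes a local server on
// the default port 7860). The "status" field must match the apikey constant.
const res = await fetch('http://localhost:7860/api/chatai/gpt4', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt: 'Hello!', status: '@SadTeam77' }),
});
console.log(await res.json()); // { status: true, result: ... } on success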
|