Update app.py
app.py CHANGED
@@ -2,13 +2,15 @@ import gradio as gr
 import requests
 import mimetypes
 import json, os
+import asyncio
+import aiohttp
 
 LLM_API = os.environ.get("LLM_API")
 LLM_URL = os.environ.get("LLM_URL")
 
 USER_ID = "HuggingFace Space"  # Placeholder user ID
 
-def send_chat_message(LLM_URL, LLM_API, category, file_id):
+async def send_chat_message(LLM_URL, LLM_API, category, file_id):
     payload = {
         "inputs": {},
         "query": category,
@@ -24,59 +26,55 @@ def send_chat_message(LLM_URL, LLM_API, category, file_id):
         ]
     }
     print("Sending chat message payload:", payload)  # Debug information
-    ...
-    if last_thought:
-        # Structure the thought text
-        return last_thought.strip()
-    else:
-        return "Error: No thought found in the response"
+    async with aiohttp.ClientSession() as session:
+        async with session.post(
+            f"{LLM_URL}/chat-messages",
+            headers={"Authorization": f"Bearer {LLM_API}"},
+            json=payload
+        ) as response:
+            print("Request URL:", f"{LLM_URL}/chat-messages")
+            print("Response status code:", response.status)
+            if response.status == 404:
+                return "Error: Endpoint not found (404)"
+
+            last_thought = None
+            async for line in response.content:
+                if line:
+                    try:
+                        data = json.loads(line.split(b"data: ")[1].decode("utf-8"))
+                        if data.get("event") == "agent_thought":
+                            last_thought = data.get("thought")
+                    except (IndexError, json.JSONDecodeError):
+                        continue
+
+            if last_thought:
+                return last_thought.strip()
+            else:
+                return "Error: No thought found in the response"
 
-def upload_file(LLM_URL, LLM_API, file_path, user_id):
+async def upload_file(LLM_URL, LLM_API, file_path, user_id):
     if not os.path.exists(file_path):
         return f"Error: File {file_path} not found"
     mime_type, _ = mimetypes.guess_type(file_path)
     with open(file_path, 'rb') as f:
-        ...
+        async with aiohttp.ClientSession() as session:
+            async with session.post(
+                f"{LLM_URL}/files/upload",
+                headers={"Authorization": f"Bearer {LLM_API}"},
+                data={"file": (file_path, f, mime_type), "user": user_id}
+            ) as response:
+                print("Upload response status code:", response.status)  # Debug information
+                if response.status == 404:
+                    return "Error: Endpoint not found (404)"
+                response_text = await response.text()
+                print("Raw upload response text:", response_text)  # Debug information
+                try:
+                    return json.loads(response_text)
+                except json.JSONDecodeError:
+                    return "Error: Invalid JSON response"
 
-def handle_input(file_path, category):
-    upload_response = upload_file(LLM_URL, LLM_API, file_path, USER_ID)
+async def handle_input(file_path, category):
+    upload_response = await upload_file(LLM_URL, LLM_API, file_path, USER_ID)
     print("Upload response:", upload_response)  # Debug information
     if "error" in upload_response:
         return upload_response
@@ -84,7 +82,7 @@ def handle_input(file_path, category):
     if not file_id:
         return "Error: No file ID returned from upload"
 
-    chat_response = send_chat_message(LLM_URL, LLM_API, category, file_id)
+    chat_response = await send_chat_message(LLM_URL, LLM_API, category, file_id)
     print("Chat response:", chat_response)  # Debug information
     return chat_response
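
A note on the new upload_file: aiohttp builds multipart bodies from aiohttp.FormData (or plain str/bytes/file-object field values), so the requests-style (file_path, f, mime_type) tuple passed in data= may fail to serialize. Below is a minimal sketch of the same upload using FormData. The "file"/"user" field names and the /files/upload endpoint are taken from the diff; the helper name upload_file_formdata and the fallback content type are assumptions, not part of the commit.

# Sketch only, under the assumptions above: same request as upload_file in the
# diff, with the multipart body built explicitly via aiohttp.FormData.
import os
import json
import mimetypes
import aiohttp

async def upload_file_formdata(LLM_URL, LLM_API, file_path, user_id):
    mime_type, _ = mimetypes.guess_type(file_path)
    form = aiohttp.FormData()
    form.add_field(
        "file",
        open(file_path, "rb"),  # file object is read by aiohttp when the request is sent
        filename=os.path.basename(file_path),
        content_type=mime_type or "application/octet-stream",
    )
    form.add_field("user", user_id)
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{LLM_URL}/files/upload",
            headers={"Authorization": f"Bearer {LLM_API}"},
            data=form,
        ) as response:
            text = await response.text()
            try:
                return json.loads(text)
            except json.JSONDecodeError:
                return "Error: Invalid JSON response"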
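
For context on how the now-async handle_input would be driven from the rest of app.py (the Gradio wiring is outside this diff): recent Gradio versions accept coroutine functions as event handlers, so the handler can be passed to gr.Interface unchanged. The sketch below is hypothetical; the component choices, labels, and the assumption of Gradio 4.x (where gr.File(type="filepath") yields a path string) are not part of the commit.

# Hypothetical wiring (not in this diff): gr.Interface can call the async
# handle_input directly.
import gradio as gr

demo = gr.Interface(
    fn=handle_input,  # async def handle_input(file_path, category) from the diff above
    inputs=[
        gr.File(label="File", type="filepath"),
        gr.Textbox(label="Category"),
    ],
    outputs=gr.Textbox(label="Response"),
)

if __name__ == "__main__":
    demo.launch()

Outside Gradio, the newly imported asyncio also lets the coroutine be exercised directly, e.g. asyncio.run(handle_input(path, category)), for local testing.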