Upload 3 files
- app.py +69 -0
- client.py +36 -0
- requirements.txt +4 -0
app.py
ADDED
@@ -0,0 +1,69 @@
import os
import logging

import gradio as gr
from dotenv import load_dotenv
from openai import AzureOpenAI

from client import gradio_client

load_dotenv()
logging.basicConfig(level=logging.INFO)

AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_KEY = os.getenv("AZURE_OPENAI_KEY")
AZURE_API_VERSION = os.getenv("AZURE_API_VERSION")

# Synchronous Azure OpenAI client used for the streaming chat completion.
client = AzureOpenAI(
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_key=AZURE_OPENAI_KEY,
    api_version=AZURE_API_VERSION,
)


def get_streaming_response(query):
    """Fetch retrieval context from the Space, then stream the model's answer."""
    try:
        # The remote Space returns the chat messages plus the source URLs.
        messages, urls = gradio_client.predict(
            query=query,
            api_name="/predict",
        )

        logging.info("Starting streaming response...")

        response = client.chat.completions.create(
            messages=messages,
            model="urdu-llama",
            temperature=0.5,
            stream=True,
        )

        # Accumulate streamed tokens and yield the growing answer with the URLs.
        output = ""
        for chunk in response:
            if chunk.choices:
                token = chunk.choices[0].delta.content
                if token:
                    output += token
                    yield output, urls

    except Exception as e:
        # Yield a value for both output components so errors are displayed, not raised.
        yield f"Error: {str(e)}", None


iface = gr.Interface(
    fn=get_streaming_response,
    inputs=gr.Textbox(placeholder="Ask me anything...", show_label=False),
    outputs=[
        gr.Markdown(label="AI Response"),
        # gr.JSON(label="Related Questions to Explore:"),
        gr.JSON(label="URLs"),
    ],
    live=False,
)

iface.launch()
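A small sketch (not part of the commit) of the handoff that get_streaming_response relies on: gradio_client.predict is expected to return a (messages, urls) pair, where messages is already in the chat format accepted by client.chat.completions.create. The query string below is only an illustration.

# Sketch: inspect what the retrieval Space returns before it is sent to Azure OpenAI.
# Assumes client.py connected successfully (hf_token set and the Space reachable).
from client import gradio_client

messages, urls = gradio_client.predict(
    query="What is deep research?",  # illustrative query
    api_name="/predict",
)

# messages should be a list of {"role": ..., "content": ...} dicts; urls lists the sources.
print(f"{len(messages)} message(s), last role: {messages[-1]['role']}")
print("Source URLs:", urls)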
client.py
ADDED
@@ -0,0 +1,36 @@
import os

from dotenv import load_dotenv
from gradio_client import Client
from openai import AzureOpenAI, AsyncAzureOpenAI

load_dotenv()

AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_KEY = os.getenv("AZURE_OPENAI_KEY")
AZURE_API_VERSION = os.getenv("AZURE_API_VERSION")

# Synchronous Azure OpenAI client.
client = AzureOpenAI(
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_key=AZURE_OPENAI_KEY,
    api_version=AZURE_API_VERSION,
)

# Asynchronous Azure OpenAI client.
openaiclient = AsyncAzureOpenAI(
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_key=AZURE_OPENAI_KEY,
    api_version=AZURE_API_VERSION,
)

# Gradio client for the private retrieval Space (requires a Hugging Face token).
hf_token = os.getenv("hf_token")

# space = "traversaal-internal/Async-web-api-for-deepsearch"
space = "traversaal-internal/Deep-Researcher-Gradio"
gradio_client = Client(space, hf_token=hf_token)
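client.py also builds openaiclient, an AsyncAzureOpenAI instance that the committed app.py never uses. A minimal sketch of how it could drive the same streaming call asynchronously; the deployment name "urdu-llama" is taken from app.py, everything else here is illustrative.

# Sketch: async counterpart of the streaming loop in app.py, using openaiclient.
# Assumes the same environment configuration as client.py.
import asyncio

from client import gradio_client, openaiclient


async def stream_answer(query):
    messages, urls = gradio_client.predict(query=query, api_name="/predict")
    response = await openaiclient.chat.completions.create(
        messages=messages,
        model="urdu-llama",
        temperature=0.5,
        stream=True,
    )
    output = ""
    async for chunk in response:  # the async client yields chunks as an async iterator
        if chunk.choices and chunk.choices[0].delta.content:
            output += chunk.choices[0].delta.content
    return output, urls


if __name__ == "__main__":
    answer, sources = asyncio.run(stream_answer("What is deep research?"))
    print(answer)
    print("Sources:", sources)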
requirements.txt
ADDED
@@ -0,0 +1,4 @@
gradio
openai==1.58.1
gradio_client
python-dotenv  # required by load_dotenv() in app.py and client.py
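Both modules read their configuration from the environment: AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_KEY and AZURE_API_VERSION for Azure OpenAI, plus hf_token for the private Space. A small sketch (not part of the commit) that fails fast when any of them is missing:

# Sketch: verify the required configuration before launching the app.
import os

from dotenv import load_dotenv

load_dotenv()

REQUIRED = ["AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_KEY", "AZURE_API_VERSION", "hf_token"]
missing = [name for name in REQUIRED if not os.getenv(name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")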