Spaces:
Runtime error
Threatthriver
committed on
Commit • 0151088
1 Parent(s): 1c9a4c1
Update app.py
app.py CHANGED
@@ -21,6 +21,7 @@ def respond(
     """
     client = InferenceClient(model=available_models[model_name])
 
+    # Prepare the conversation history for the API call
     messages = [{"role": "system", "content": system_message}]
     for user_input, assistant_response in history:
         messages.append({"role": "user", "content": user_input})
@@ -28,7 +29,9 @@ def respond(
     messages.append({"role": "user", "content": message})
 
     streamed_response = ""
+
     try:
+        # Generate a response from the model with streaming
         for response in client.chat_completion(
             messages=messages,
             max_tokens=max_tokens,
@@ -39,19 +42,24 @@ def respond(
             chunk = response.choices[0].delta.get("content", "")
             streamed_response += chunk
             yield streamed_response
+
     except Exception as e:
         yield f"**Error:** {str(e)}"
 
-# Latest updates
+# Latest updates (this is just placeholder text)
 latest_updates = """
 **Chatbot - Latest Updates:**
 
-*
-*
+* Multiple Model Support
+* Improved Error Handling
+* Enhanced System Message Input
 """
 
 def show_updates_and_respond(history, system_message, max_tokens, temperature, top_p, model_name):
-
+    """
+    Shows the latest updates and generates a response from the model based on the updates.
+    """
+    history.append(("User", "Show me the latest updates"))
     yield from respond(
         message="Show me the latest updates",
         history=history,
@@ -61,8 +69,8 @@ def show_updates_and_respond(history, system_message, max_tokens, temperature, top_p, model_name):
         top_p=top_p,
         model_name=model_name,
     )
-    history[-1] = ("User
-    history.append(("Assistant
+    history[-1] = ("User", "Show me the latest updates")
+    history.append(("Assistant", latest_updates))
     yield from respond(
         message="What are the latest updates?",
         history=history,
@@ -73,7 +81,7 @@ def show_updates_and_respond(history, system_message, max_tokens, temperature, top_p, model_name):
         model_name=model_name,
    )
 
-# Define the Gradio interface
+# Define the Gradio interface with the Blocks context
 with gr.Blocks(css=".gradio-container {border: none;}") as demo:
     chat_history = gr.State([])  # Initialize an empty chat history state
     system_message = gr.Textbox(
@@ -81,41 +89,24 @@ with gr.Blocks(css=".gradio-container {border: none;}") as demo:
         label="System message",
         lines=2
     )
-
-
-
-    model_dropdown = gr.Dropdown(
-
-
-        label="Select Model",
-    )
-
+    max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
+    temperature = gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature")
+    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
+    model_dropdown = gr.Dropdown(choices=list(available_models.keys()), value="Zephyr 7B Beta", label="Select Model")
+
+    # Define the chat interface
     chatbot = gr.Chatbot()
 
-
-        history = history or []
-        history.append(("User", message))
-        bot_response = next(respond(message, history, system_message, max_tokens, temperature, top_p, model_name))
-        history.append(("Assistant", bot_response))
-        return history, history
-
-    message_input = gr.Textbox(label="Your message")
-
-    message_input.submit(
-        fn=user_interaction,
-        inputs=[message_input, chat_history, system_message, max_tokens_slider, temperature_slider, top_p_slider, model_dropdown],
-        outputs=[chatbot, chat_history],
-    )
-
-    # Add the "Show Updates" button and output area
+    # Add the "Show Updates" button
     updates_button = gr.Button("Show Latest Updates")
 
+    # Button click event
     updates_button.click(
         fn=show_updates_and_respond,
-        inputs=[chat_history, system_message,
-        outputs=
+        inputs=[chat_history, system_message, max_tokens, temperature, top_p, model_dropdown],
+        outputs=chat_history
     )
 
-# Launch the Gradio interface
+# Launch the Gradio interface
 if __name__ == "__main__":
-    demo.launch(share=True
+    demo.launch(share=True)
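
Note on context: the hunks above show only part of app.py. As a reading aid, here is a minimal sketch of the pieces the changed code relies on, assuming `available_models` maps dropdown labels to Hugging Face model IDs and that `respond` streams tokens from `huggingface_hub.InferenceClient.chat_completion` with `stream=True`. The mapping, the model ID, and the exact keyword arguments are illustrative assumptions, not taken from this repository; the sketch reads streamed text via `chunk.choices[0].delta.content`, which is how current `huggingface_hub` releases expose it, rather than the dict-style `delta.get(...)` used in the commit.

from huggingface_hub import InferenceClient

# Assumed mapping of dropdown labels to model IDs (illustrative, not from the repo)
available_models = {
    "Zephyr 7B Beta": "HuggingFaceH4/zephyr-7b-beta",
}

def respond(message, history, system_message, max_tokens, temperature, top_p, model_name):
    """Stream a chat completion, yielding the partially built reply after each chunk."""
    client = InferenceClient(model=available_models[model_name])

    # Rebuild the conversation for the API call
    messages = [{"role": "system", "content": system_message}]
    for user_input, assistant_response in history:
        messages.append({"role": "user", "content": user_input})
        messages.append({"role": "assistant", "content": assistant_response})
    messages.append({"role": "user", "content": message})

    streamed_response = ""
    try:
        # stream=True yields chunks; each chunk carries the newly generated text in its delta
        for chunk in client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            stream=True,
        ):
            streamed_response += chunk.choices[0].delta.content or ""
            yield streamed_response
    except Exception as e:
        yield f"**Error:** {str(e)}"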
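
Usage note on the event wiring in the last hunk: in a `gr.Blocks` app, `Button.click(fn=..., inputs=..., outputs=...)` reads the current values of the input components, passes them to `fn`, and writes whatever `fn` returns (or yields, for a generator) back into the output components. A minimal, self-contained sketch of that pattern follows; the component names and the streamed text are made up for illustration and are not the app's actual components.

import time
import gradio as gr

def stream_updates(current_text):
    # Generator callback: each yield replaces the value of the output component
    for line in ["* Multiple Model Support", "* Improved Error Handling", "* Enhanced System Message Input"]:
        current_text = (current_text or "") + line + "\n"
        time.sleep(0.2)  # make the streaming visible
        yield current_text

with gr.Blocks() as demo:
    notes = gr.Textbox(label="Latest updates", lines=4)
    show_button = gr.Button("Show Latest Updates")
    # The textbox is both the input (current value) and the output (streamed value)
    show_button.click(fn=stream_updates, inputs=notes, outputs=notes)

if __name__ == "__main__":
    demo.launch()

In the committed code the click handler writes to the `chat_history` `gr.State`; the `chatbot` component itself is not listed in `outputs`, so the streamed text updates the stored state rather than the visible chat window.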