Update app.py
app.py CHANGED
@@ -10,16 +10,10 @@ client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
 
 def respond(
     message,
-    history: list[tuple[str, str]] = None
+    history: list[tuple[str, str]] = None  # Default history as None to avoid mutable issues
 ):
     if history is None:
-        history = [
-            (
-                None,
-                "Hi there! I'm your Dietician Assistant, here to help with general advice on diet, "
-                "nutrition, and healthy eating habits. Let's explore your questions.",
-            )
-        ]
+        history = []
 
     # System message describing the assistant's role
     system_message = (
@@ -63,11 +57,19 @@ def respond(
         yield response
 
 
-
+def default_message():
+    """Function to return initial default message."""
+    return [("Hi there! I'm your Dietician Assistant, here to help with general advice "
+             "on diet, nutrition, and healthy eating habits. Let's explore your questions.", "")]
+
+
+# Set up the Gradio ChatInterface with an initial default message
 with gr.Blocks() as demo:
-    gr.ChatInterface(respond
-
-
+    chatbot = gr.ChatInterface(respond)
+
+    # Display the default message on load
+    gr.State(default_message())  # Store initial chat history
+    chatbot.history = default_message()  # Set the chat history to show the greeting
 
 if __name__ == "__main__":
     demo.launch()
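Note on the last hunk: gr.ChatInterface builds and manages its own gr.Chatbot, so assigning chatbot.history after construction (and creating an unattached gr.State) may not actually surface the greeting in the UI. A minimal sketch of an alternative, assuming Gradio 4.x and the tuple-style chat history this Space already uses, is to pre-populate the chatbot component that ChatInterface accepts. The system prompt text and generation parameters below are placeholders, not taken from the Space's app.py.

# Sketch only: seed the greeting via ChatInterface's `chatbot` argument
# instead of mutating `chatbot.history` after construction.
import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")

GREETING = (
    "Hi there! I'm your Dietician Assistant, here to help with general advice "
    "on diet, nutrition, and healthy eating habits. Let's explore your questions."
)


def respond(message, history: list[tuple[str, str]] | None = None):
    history = history or []
    # Placeholder system prompt; the Space defines its own system_message.
    messages = [{"role": "system", "content": "You are a helpful dietician assistant."}]
    for user, assistant in history:
        if user:
            messages.append({"role": "user", "content": user})
        if assistant:
            messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in client.chat_completion(messages, max_tokens=512, stream=True):
        response += chunk.choices[0].delta.content or ""
        yield response


with gr.Blocks() as demo:
    gr.ChatInterface(
        respond,
        # A (None, text) tuple renders as an assistant-only message, so the
        # greeting is visible as soon as the page loads.
        chatbot=gr.Chatbot(value=[(None, GREETING)]),
    )

if __name__ == "__main__":
    demo.launch()

Newer Gradio releases favor the messages-style history (type="messages"), but the tuple form above matches what respond() already consumes in this Space.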