import gradio as gr
from transformers import pipeline

# ✅ Load a free, smart, no-auth model
chatbot = pipeline("text-generation", model="tiiuae/falcon-rw-1b")

# 🧠 Store per-user chat history
chat_histories = {}
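# Note: the pipeline runs on CPU unless a device is specified, and creating it
# downloads the Falcon-RW-1B weights (roughly 1B parameters) from the Hugging Face
# Hub, so the first startup of the Space can take a while.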
# 🔥 Auto chatbot personality detector (simple keyword matching)
def detect_personality(message):
    msg = message.lower()
    if any(word in msg for word in ["book", "learn", "subject", "exam", "teacher"]):
        return "Educational"
    elif any(word in msg for word in ["call", "complain", "refund", "issue", "product", "service"]):
        return "Customer Care"
    elif any(word in msg for word in ["remind", "note", "task", "weather", "time", "schedule"]):
        return "PA"
    elif any(word in msg for word in ["remember", "where", "keep", "store", "memory"]):
        return "Memory"
    return "General"
# 🧠 The AI brain
def chat(user_id, message):
    if user_id not in chat_histories:
        chat_histories[user_id] = []

    chatbot_type = detect_personality(message)

    # Build the prompt: persona line + last 5 turns of history + new user message
    prompt = f"""You are a helpful {chatbot_type} AI chatbot. Be polite, smart, and helpful.
Conversation so far:
"""
    for role, msg in chat_histories[user_id][-5:]:
        prompt += f"{role}: {msg}\n"
    prompt += f"User: {message}\nAI:"

    # Generate reply (do_sample=True is required for temperature to take effect)
    response = chatbot(prompt, max_new_tokens=100, do_sample=True, temperature=0.7)[0]["generated_text"]

    # Extract only the new AI part and drop any extra "User:" turn the model invents
    ai_reply = response.split("AI:")[-1].split("User:")[0].strip()

    # Update memory
    chat_histories[user_id].append(("User", message))
    chat_histories[user_id].append(("AI", ai_reply))
    return ai_reply
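# For a first-time user asking "Where did I keep my passport?", the assembled
# prompt would look roughly like this before generation:
#
#   You are a helpful Memory AI chatbot. Be polite, smart, and helpful.
#   Conversation so far:
#   User: Where did I keep my passport?
#   AI: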
# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 Multi-AI Chatbot (Memory, PA, Education, Customer Care)")
    user_id = gr.Textbox(label="User ID (for memory)", value="test_user", visible=False)
    chatbox = gr.Chatbot()
    msg = gr.Textbox(label="Your Message")
    send = gr.Button("Send")

    def user_send(u_id, m, history):
        reply = chat(u_id, m)
        history.append((m, reply))
        return "", history

    send.click(user_send, inputs=[user_id, msg, chatbox], outputs=[msg, chatbox])

demo.launch()
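# Deployment note: a Space running this app needs gradio, transformers, and torch
# in requirements.txt (Falcon-RW-1B ships as a PyTorch model). Newer Gradio releases
# prefer gr.Chatbot(type="messages"); the tuple-style history used above still works
# but may emit a deprecation warning.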