import gradio as gr
import os
import time
from openai import OpenAI

# --- Configuration ---
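# The OpenAI SDK is pointed at OpenRouter; OPENROUTER_API_KEY must be set in the
# environment (e.g. as a Space secret) for requests to authenticate.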
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=os.environ.get("OPENROUTER_API_KEY"),
)

# Optional OpenRouter attribution headers, sent with every completion request.
REFERER_HEADER = "https://alphaai.org.in"
TITLE_HEADER = "AlphaAI Chatbot"
MODEL_NAME = "deepseek/deepseek-r1-0528-qwen3-8b:free"
# System prompt describing Alpha AI, read from the environment (empty string if unset).
ALPHAAI_CONTEXT = os.environ.get("ALPHAAI_CONTEXT", "")

# --- Chatbot Backend ---
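# Any API failure is caught and returned as a plain "Error: ..." string so the UI
# can show it as the assistant's reply instead of crashing.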
def chat_with_llm(user_input):
    """Send one user query to the OpenRouter model, constrained to the Alpha AI context."""
    try:
        # Prepend an instruction so the model politely refuses anything unrelated to Alpha AI.
        input_fixed = (
            "Stick to the context of Alpha AI and do not let the user deviate from that. "
            "If the query is anything other than Alpha AI, politely refuse. "
            "Here is the user query - " + str(user_input)
        )
        completion = client.chat.completions.create(
            extra_headers={
                "HTTP-Referer": REFERER_HEADER,
                "X-Title": TITLE_HEADER,
            },
            model=MODEL_NAME,
            messages=[
                {"role": "system", "content": ALPHAAI_CONTEXT},
                {"role": "user", "content": input_fixed},
            ],
        )
        # message.content can be None in edge cases, so fall back to an empty string.
        return completion.choices[0].message.content or ""
    except Exception as e:
        return f"Error: {str(e)}"

# --- Gradio UI ---
theme = gr.themes.Base()
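# Blocks lets us wire the textbox, chatbot, and clear button together with custom event handlers.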
with gr.Blocks(theme=theme) as demo:
    gr.Markdown("# 🧠 Alpha AI Assistant")
    gr.Markdown("Ask anything about Alpha AI.")

    # type="messages" stores the chat history as a list of {"role": ..., "content": ...} dicts.
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history: list):
        """Append the submitted message to the history and clear the textbox."""
        return "", history + [{"role": "user", "content": user_message}]

    def bot(history: list):
        """Stream the assistant reply into the chat one character at a time."""
        user_msg = history[-1]["content"]
        try:
            response = chat_with_llm(user_msg)
        except Exception as e:
            response = f"Error: {str(e)}"
        history.append({"role": "assistant", "content": ""})
        yield history  # show the empty assistant bubble immediately, even if the reply is empty
        for char in response:
            history[-1]["content"] += char
            time.sleep(0.02)  # small delay for a typing effect
            yield history

    # On submit: record the user message first, then stream the assistant reply.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Clearing resets the chat history to empty.
    clear.click(lambda: None, None, chatbot, queue=False)

# Launch (required for Hugging Face Space)
demo.launch()
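# When run locally, launch() also serves the app at a local URL (default http://127.0.0.1:7860).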