Faizal2805 committed
Commit 43a23d4 · verified · 1 Parent(s): 73b5cfa

Update app.py

Files changed (1)
  app.py +35 -17
app.py CHANGED
@@ -1,23 +1,41 @@
- from transformers import AutoModelForCausalLM, AutoTokenizer
  import gradio as gr
- import os # Import os to read environment variables

- # Get the API token securely from Hugging Face Secrets
- HF_AUTH_TOKEN = os.getenv("HF_AUTH_TOKEN")

- # Replace this with your model
- MODEL_NAME = "mistralai/Mistral-7B-Instruct"

- # Load tokenizer & model with authentication
- tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, token=HF_AUTH_TOKEN)
- model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, token=HF_AUTH_TOKEN)

- # Define Gradio chat function
- def chat_with_ai(message):
-     inputs = tokenizer(message, return_tensors="pt")
-     outputs = model.generate(**inputs, max_length=500)
-     return tokenizer.decode(outputs[0])

- # Create Gradio Interface
- iface = gr.Interface(fn=chat_with_ai, inputs="text", outputs="text")
- iface.launch()
 
+ import torch
  import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the model and tokenizer
+ MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.1"
+
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+ model = AutoModelForCausalLM.from_pretrained(
+     MODEL_NAME, torch_dtype=torch.float16, device_map="auto"
+ )

+ # Chat function
+ def chat(user_input, history):
+     messages = [{"role": "user", "content": user_input}]
+     encoded = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)

+     # Generate response
+     with torch.no_grad():
+         output_ids = model.generate(encoded, max_new_tokens=256, do_sample=True)
+
+     response = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+
+     # Append to chat history
+     history.append((user_input, response))
+     return history

+ # Create Gradio Chatbot UI
+ with gr.Blocks() as demo:
+     gr.Markdown("# 🤖 Mistral-7B Chatbot")
+
+     chatbot = gr.Chatbot()
+     user_input = gr.Textbox(label="Type your message here...")
+
+     def respond(message, chat_history):
+         return chat(message, chat_history)

+     user_input.submit(respond, [user_input, chatbot], chatbot)

+ # Launch for Hugging Face Spaces
+ demo.launch()
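
For reference, a minimal sketch (not part of this commit) of exercising the new chat() function outside the Gradio UI, assuming the model and tokenizer defined in app.py are already loaded in the current session and fit on the available hardware. Because model.generate() returns the input tokens followed by the newly generated ones, the decoded reply also contains the rendered prompt.

# Hypothetical smoke test: call chat() directly, with the definitions from app.py in scope.
history = []
history = chat("Hello! What can you help me with?", history)

user_message, reply = history[-1]  # chat() appends (user_input, response) tuples
print(reply)                       # decoded text includes the prompt, since output_ids[0]
                                   # holds both the input and the generated tokens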