jophex committed on
Commit d673e94 · verified · 1 Parent(s): 37d1349

Update app.py

Files changed (1)
  1. app.py +42 -1
app.py CHANGED
@@ -1,3 +1,44 @@
  import gradio as gr
 
- gr.load("models/microsoft/Phi-3.5-mini-instruct").launch(share=True)
+ # gr.load("models/microsoft/Phi-3.5-mini-instruct").launch(share=True)
+
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ # Load the model and tokenizer
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct", trust_remote_code=True, use_auth_token=True)
+ model = AutoModelForCausalLM.from_pretrained("microsoft/Phi-3.5-mini-instruct", trust_remote_code=True, use_auth_token=True)
+
+ # Define the role prompt for advertisement assistance
+ role_prompt = "You are an advertisement assistant. Respond professionally and helpfully to advertising-related questions.\n\n"
+
+ # Function to generate responses
+ def generate_response(user_input):
+     # Prepend role information to user input
+     input_text = role_prompt + user_input
+
+     # Tokenize and generate response
+     inputs = tokenizer(input_text, return_tensors="pt")
+     outputs = model.generate(
+         **inputs,
+         max_new_tokens=50,  # Increase this if you want longer responses
+         temperature=0.7,    # Adjust for creativity
+         top_p=0.9           # Nucleus sampling to control randomness
+     )
+
+     # Decode and return the response
+     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return response
+
+ # Set up Gradio interface
+ interface = gr.Interface(
+     fn=generate_response,
+     inputs="text",
+     outputs="text",
+     title="Advertisement Assistant Chatbot",
+     description="Ask me anything related to advertising. I'm here to help!"
+ )
+
+ # Launch the Gradio app with sharing enabled
+ interface.launch(share=True)
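
Because the new app exposes a single-function gr.Interface, it can also be exercised programmatically with gradio_client once it is running. A minimal sketch, assuming a placeholder URL (the real share link is printed by interface.launch(share=True)) and Gradio's default /predict endpoint for a one-function Interface:

    from gradio_client import Client

    # Placeholder: replace with the share link printed at launch, or a "user/space-name" Space id
    client = Client("<share-url-or-space-id>")

    # Calls generate_response() on the server and returns its text output
    result = client.predict("Suggest a slogan for a small coffee shop.", api_name="/predict")
    print(result)

Two hedged notes on the generation settings: transformers only applies temperature and top_p when do_sample=True is also passed to model.generate, and since Phi-3.5-mini-instruct is a chat-tuned model, the role prompt could alternatively be fed through the tokenizer's chat template rather than plain string concatenation. A sketch of that variant, reusing the tokenizer and model objects defined in app.py above:

    # Variant: build the prompt with the model's chat template instead of concatenation
    messages = [
        {"role": "system", "content": "You are an advertisement assistant. Respond professionally and helpfully to advertising-related questions."},
        {"role": "user", "content": "Suggest a slogan for a small coffee shop."},
    ]
    input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")
    outputs = model.generate(input_ids, max_new_tokens=50, do_sample=True, temperature=0.7, top_p=0.9)
    # Decode only the newly generated tokens, skipping the prompt
    print(tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True))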