william4416 committed · verified
Commit 23d9cc4 · 1 Parent(s): b633d0c

Update app.py

Files changed (1):
  1. app.py  +39 -4
app.py CHANGED
@@ -1,7 +1,42 @@
+from transformers import AutoModelForCausalLM, AutoTokenizer
 import gradio as gr
+import torch
 
-def greet(name):
-    return "Hello " + name + "!!"
+title = "Custom AI ChatBot"
+description = "A State-of-the-Art Large-scale Pretrained Response generation model (DialoGPT)"
+examples = [["How are you?"]]
 
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
-iface.launch()
+tokenizer = AutoTokenizer.from_pretrained("william4416/bewtestingone")
+model = AutoModelForCausalLM.from_pretrained("william4416/bewtestingone")
+
+def predict(input, history=[]):
+    # tokenize the new input sentence
+    new_user_input_ids = tokenizer.encode(
+        input + tokenizer.eos_token, return_tensors="pt"
+    )
+
+    # append the new user input tokens to the chat history
+    bot_input_ids = torch.cat([torch.LongTensor(history), new_user_input_ids], dim=-1)
+
+    # generate a response
+    history = model.generate(
+        bot_input_ids, max_length=4000, pad_token_id=tokenizer.eos_token_id
+    ).tolist()
+
+    # convert the tokens to text
+    response = tokenizer.decode(history[0])
+    return response, history
+
+def main():
+    gr.Interface(
+        fn=predict,
+        title=title,
+        description=description,
+        examples=examples,
+        inputs=["text", "state"],
+        outputs=["text", "state"],
+        theme="finlaymacklon/boxy_violet",
+    ).launch()
+
+if __name__ == "__main__":
+    main()
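
For reference, below is a minimal sketch of how the new predict() threads the token-id history between turns, which is the same hand-off that the "state" input/output pair performs inside the Gradio UI. It assumes the updated file above is saved as app.py and that the william4416/bewtestingone checkpoint can be downloaded; the script name and prompts are illustrative only.

# demo_cli.py: illustrative driver, not part of the commit above
from app import predict  # importing app.py loads the tokenizer and model but does not launch the UI

# First turn: history defaults to an empty list, so only the new prompt is encoded.
reply, history = predict("How are you?")
print(reply)

# Second turn: passing the returned token ids back in lets the model condition
# on the whole conversation; Gradio's "state" components do this between calls.
reply, history = predict("What can you do?", history)
print(reply)

Note that predict() decodes the entire history[0], so each reply string contains every turn so far, joined by the tokenizer's <|endoftext|> token, rather than only the newest answer.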
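
If the textbox should show only the bot's newest sentence instead of the full decoded history, a common variant (not part of this commit) slices off the prompt tokens before decoding. The helper below is hypothetical and reuses the tokenizer, model, and torch import loaded in app.py:

def predict_last_reply(user_text, history=[]):
    # Encode the new prompt and append it to the running token-id history.
    new_ids = tokenizer.encode(user_text + tokenizer.eos_token, return_tensors="pt")
    bot_input_ids = torch.cat([torch.LongTensor(history), new_ids], dim=-1)

    # Generate, then decode only the tokens produced after the prompt.
    generated = model.generate(
        bot_input_ids, max_length=4000, pad_token_id=tokenizer.eos_token_id
    )
    reply = tokenizer.decode(
        generated[0, bot_input_ids.shape[-1]:], skip_special_tokens=True
    )
    return reply, generated.tolist()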