gabriel lopez commited on
Commit
38fdc49
1 Parent(s): 55e7d5f

working prototype

Browse files
Files changed (3) hide show
  1. .gitignore +2 -0
  2. app.py +61 -0
  3. core.py +0 -0
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ .env
2
+ .git*
app.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import TFAutoModelForCausalLM, AutoTokenizer
2
+ import tensorflow as tf
3
+ import gradio as gr
4
+
5
# UI copy shown by the Gradio interface.
TITLE = "DialoGPT -- Chatbot"
DESCRIPTION = "<center>Have funny/existential dialogs with non-human entities</center>"
# Seed prompts offered as clickable examples in the UI.
EXAMPLES = [
    ["How will the world end?"],
    ["Does the universe have a purpose?"],
    ["Is the universe infinite?"],
    ["Was Einstein right about time being relative?"],
    ["What is Pythagoras theorem?"],
    ["What is the meaning of life?"],
]
# Footer article rendered below the interface (raw HTML).
ARTICLE = r"""<center>
This application allows you to talk with a machine.
In the back-end I'm using the DialoGPT model from microsoft.<br>
This model extends GPT2 towards the conversational neural response generation domain.<br>
ArXiv paper: https://arxiv.org/abs/1911.00536<br>
Done by dr. Gabriel Lopez<br>
For more please visit: <a href='https://sites.google.com/view/dr-gabriel-lopez/home'>My Page</a><br>
</center>"""
23
+
24
# Load the pretrained DialoGPT-medium tokenizer and TensorFlow model once at
# import time (downloaded from the Hugging Face hub on first run).
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = TFAutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
26
+
27
+
28
def chat_with_bot(user_input, chat_history_and_input=None):
    """Generate one DialoGPT reply and return it with the updated token history.

    Args:
        user_input: The user's message for this turn (plain text).
        chat_history_and_input: Token ids of the conversation so far
            (the array returned by a previous call), or None/empty on the
            first turn. NOTE: the default was a mutable ``[]``; ``None`` is
            used instead to avoid the shared-mutable-default pitfall while
            keeping the same first-turn behavior.

    Returns:
        A ``(bot_response, chat_history_and_input)`` tuple: the decoded reply
        string and the full token-id history (numpy array) to feed back in as
        state on the next turn.
    """
    # Encode this turn's message, terminated by the EOS token as DialoGPT expects.
    emb_user_input = tokenizer.encode(
        user_input + tokenizer.eos_token, return_tensors="tf"
    )
    print("chat_history:", chat_history_and_input)
    print("emb_user_input:", emb_user_input)
    # Original code compared the history to [] with ==, which on a numpy array
    # relies on a deprecated elementwise-comparison fallback; test emptiness
    # explicitly instead.
    if chat_history_and_input is None or len(chat_history_and_input) == 0:
        bot_input_ids = emb_user_input  # first turn: no history to prepend
    else:
        # Later turns: append the new message to the running history.
        bot_input_ids = tf.concat([chat_history_and_input, emb_user_input], axis=-1)
    # Generate the continuation; the result contains history + new reply tokens.
    chat_history_and_input = model.generate(
        bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id
    ).numpy()
    # The reply is everything generated past the prompt we fed in.
    bot_response = tokenizer.decode(
        chat_history_and_input[:, bot_input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )
    print(f"{bot_response=}")
    print(f"{chat_history_and_input=}")
    return bot_response, chat_history_and_input
51
+
52
# Wire the chat function into a simple Gradio UI and start serving it.
demo = gr.Interface(
    fn=chat_with_bot,
    inputs=["text", "state"],
    outputs=["text", "state"],
    title=TITLE,
    description=DESCRIPTION,
    article=ARTICLE,
    examples=EXAMPLES,
    allow_flagging=False,
)
demo.launch()
core.py ADDED
File without changes