feng2022 committed
Commit 3557c00
1 Parent(s): 801b823

Update app.py

Files changed (1)
  1. app.py +20 -8
app.py CHANGED
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import random
 import argparse
 import functools
 import os
@@ -39,7 +40,6 @@ TOKEN = "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"
 
 
 pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")
-scores = []
 
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser()
@@ -71,10 +71,20 @@ def load_model(file_name: str, path:str,device: torch.device) -> nn.Module:
 def predict(text):
     return pipe(text)[0]["translation_text"]
 
-def track_score(score):
-    scores.append(score)
-    top_scores = sorted(scores, reverse=True)[:3]
-    return top_scores
+def chat(message, history):
+    history = history or []
+    message = message.lower()
+    if message.startswith("how many"):
+        response = random.randint(1, 10)
+    elif message.startswith("how"):
+        response = random.choice(["Great", "Good", "Okay", "Bad"])
+    elif message.startswith("where"):
+        response = random.choice(["Here", "There", "Somewhere"])
+    else:
+        response = "I don't know"
+    history.append((message, response))
+    return history, history
+
 
 def main():
     #torch.cuda.init()
@@ -118,10 +128,12 @@ def main():
     #server_port=args.port,
     #share=args.share,
     #)
+    chatbot = gr.Chatbot().style(color_map=("green", "pink"))
     demo = gr.Interface(
-        track_score,
-        gr.Number(label="Score"),
-        gr.JSON(label="Top Scores")
+        chat,
+        ["text", "state"],
+        [chatbot, "state"],
+        allow_flagging="never",
     )
     demo.launch()
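
Note: the diff above wires a stateful chat function into gr.Interface. Below is a minimal, self-contained sketch of that pattern for reference, not the repository's exact code; it assumes an older Gradio release (roughly 3.x) where the "state" input/output shortcut, tuple-style Chatbot history, and allow_flagging= are available, and it omits the .style(color_map=...) call, which exists only in those older Gradio versions.

# Hypothetical standalone sketch of the chat-with-session-state pattern
# introduced by this commit (assumes Gradio ~3.x APIs).
import random

import gradio as gr


def chat(message, history):
    # The "state" input arrives as None on the first call, so default it.
    history = history or []
    message = message.lower()
    if message.startswith("how many"):
        response = str(random.randint(1, 10))  # cast to str so Chatbot renders it
    elif message.startswith("how"):
        response = random.choice(["Great", "Good", "Okay", "Bad"])
    elif message.startswith("where"):
        response = random.choice(["Here", "There", "Somewhere"])
    else:
        response = "I don't know"
    history.append((message, response))
    # Return the history twice: once to render in the Chatbot component,
    # once to persist in session state for the next call.
    return history, history


demo = gr.Interface(
    fn=chat,
    inputs=["text", "state"],
    outputs=[gr.Chatbot(), "state"],
    allow_flagging="never",
)

if __name__ == "__main__":
    demo.launch()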