thak123 committed
Commit b3b3581 · 1 Parent(s): 901886a

Update app.py

Files changed (1)
  1. app.py +16 -16
app.py CHANGED
@@ -11,8 +11,8 @@ import gradio as gr
 
 # DEVICE = config.device
 
-# model = AutoModel.from_pretrained("thak123/bert-emoji-latvian-twitter-classifier")
-# tokenizer = AutoTokenizer.from_pretrained("FFZG-cleopatra/bert-emoji-latvian-twitter")
+model = AutoModel.from_pretrained("thak123/bert-emoji-latvian-twitter-classifier")
+tokenizer = AutoTokenizer.from_pretrained("FFZG-cleopatra/bert-emoji-latvian-twitter")
 
 # classifier = pipeline("sentiment-analysis",
 #                       model= model,
@@ -49,7 +49,7 @@ def preprocess(text):
 
 
 def sentence_prediction(sentence):
-    sentence = preprocess(sentence)
+    # sentence = preprocess(sentence)
     # model_path = config.MODEL_PATH
 
     # test_dataset = dataset.BERTDataset(
@@ -92,30 +92,30 @@ def sentence_prediction(sentence):
 
 
 
-def greet(name):
-    return "Hello " + name + "!"
+# def greet(name):
+#     return "Hello " + name + "!"
 
-demo = gr.Interface(
-    fn=greet,
-    inputs=gr.Textbox(lines=2, placeholder="Name Here..."),
-    outputs="text",
-)
-demo.launch()
+# demo = gr.Interface(
+#     fn=greet,
+#     inputs=gr.Textbox(lines=2, placeholder="Name Here..."),
+#     outputs="text",
+# )
+# demo.launch()
 
 
-import gradio as gr
+# import gradio as gr
 
-from transformers import pipeline
+# from transformers import pipeline
 
-pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")
+# pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")
 
 def predict(text):
     return pipe(text)[0]["translation_text"]
 
 demo = gr.Interface(
-    fn=predict,
+    fn=sentence_prediction,
     inputs='text',
-    outputs='text',
+    outputs='label',
 )
 
 demo.launch()
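
After this commit, app.py loads the Latvian Twitter emoji BERT checkpoint and tokenizer directly and wires sentence_prediction into the Gradio Interface with a label output, while the earlier greet and translation demo code is commented out. The sketch below shows one way those pieces could fit together; the model and tokenizer IDs come from the diff, but the use of AutoModelForSequenceClassification, the sentiment-analysis pipeline call, and the label-to-score return value are assumptions, not the repository's actual sentence_prediction implementation.

# Minimal sketch only: model/tokenizer IDs are taken from the diff above;
# the classification head, pipeline call, and dict return value are assumptions.
import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

model = AutoModelForSequenceClassification.from_pretrained(
    "thak123/bert-emoji-latvian-twitter-classifier"
)
tokenizer = AutoTokenizer.from_pretrained("FFZG-cleopatra/bert-emoji-latvian-twitter")

# Build a sentiment-analysis pipeline from the loaded model and tokenizer.
classifier = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)


def sentence_prediction(sentence):
    # Return {label: confidence} so Gradio's 'label' output can render the scores.
    scores = classifier(sentence, top_k=None)
    return {item["label"]: float(item["score"]) for item in scores}


demo = gr.Interface(
    fn=sentence_prediction,
    inputs="text",
    outputs="label",
)

demo.launch()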