# HuggingFace Spaces file-viewer residue (not part of the program):
# clarkchan's picture
# 123
# d3d9869
# raw / history / blame
# 391 Bytes
import gradio as gr
from transformers import AutoTokenizer

# Load a pretrained Chinese RoBERTa tokenizer once at module import time
# (downloads from the HuggingFace Hub on first run).
tokenizer = AutoTokenizer.from_pretrained("liam168/c2-roberta-base-finetuned-dianping-chinese")
def greet(name):
    """Tokenize *name* and return its token-id tensor as a comma-joined string.

    Args:
        name: Input text from the Gradio text box.

    Returns:
        A string representation of the token ids, for display in the
        Gradio output box.
    """
    result = tokenizer([name], padding=True, truncation=True, return_tensors="pt")
    tensor_str = ','.join(map(str, result.input_ids.numpy()))
    # BUG FIX: the original computed tensor_str but never returned it,
    # so the Gradio output was always empty (function returned None).
    return tensor_str
# Wire the tokenizer demo into a minimal web UI: a single text input
# mapped through greet() to a single text output, then start the server.
iface = gr.Interface(
    fn=greet,
    inputs="text",
    outputs="text",
)
iface.launch()