from datetime import date
import os

import gradio as gr
from huggingface_hub import login
from transformers import pipeline

from predictor import Predictor

# Authenticate with the Hugging Face Hub so the private pipelines can be loaded.
login(os.environ["HF_Token"])

paths = [
    "data/W020230619818476939351.xls",
    "data/W020230619818476975218.xls",
]

predictor = Predictor(
    pipelines={
        "name": pipeline(
            "nerpipe",
            model="minskiter/resume-token-classification-name-0708",
            trust_remote_code=True,
            use_auth_token=True,
        ),
        "common": pipeline(
            "nerpipe",
            model="minskiter/resume-token-classification",
            trust_remote_code=True,
            use_auth_token=True,
        ),
    },
    paths=paths,
    today=date(2023, 4, 1),
)


def ner_predictor_gradio(text):
    entities = predictor(text)
    # Flatten the (possibly nested) entity lists into a single list
    # so gr.HighlightedText can render them.
    flattened = []
    for key in entities:
        if isinstance(entities[key], list):
            for item in entities[key]:
                if isinstance(item, list):
                    for subitem in item:
                        flattened.append(subitem)
                else:
                    flattened.append(item)
    return {"text": text, "entities": flattened}


demo = gr.Interface(
    fn=ner_predictor_gradio,
    inputs=gr.Textbox(lines=5, label="Enter your resume"),
    outputs=gr.HighlightedText(label="Resume recognition results"),
)
demo.launch()