import gc
import os
import csv
import socket

import requests
import gradio as gr
import pandas as pd
from huggingface_hub import hf_hub_download, Repository
from transformers import AutoTokenizer, AutoModelWithLMHead  # AutoModelForSeq2SeqLM is the non-deprecated equivalent in newer transformers

## connection with HF datasets
HF_TOKEN = os.environ.get("HF_TOKEN")
DATASET_NAME = "emotion_detection"
DATASET_REPO_URL = f"https://huggingface.co/datasets/pragnakalp/{DATASET_NAME}"
DATASET_REPO_ID = "pragnakalp/emotion_detection"
DATA_DIRNAME = "emotion_detection_logs"  # local clone of the logging dataset repo
DATA_FILENAME = "emotion_detection_logs.csv"
DATA_FILE = os.path.join(DATA_DIRNAME, DATA_FILENAME)
print("is none?", HF_TOKEN is None)

# fetch the existing log file (if any) before cloning the dataset repo
try:
    hf_hub_download(
        repo_id=DATASET_REPO_ID,
        filename=DATA_FILENAME,
        cache_dir=DATA_DIRNAME,
        force_filename=DATA_FILENAME
    )
except Exception:
    print("file not found")

repo = Repository(
    local_dir=DATA_DIRNAME,
    clone_from=DATASET_REPO_URL,
    use_auth_token=HF_TOKEN
)

SENTENCES_VALUE = """Raj loves Simran.\nLast year I lost my Dog.\nI bought a new phone!\nShe is scared of cockroaches.\nWow! I was not expecting that.\nShe got mad at him."""

## load model
cwd = os.getcwd()
model_path = cwd  # the fine-tuned weights are expected in the Space's working directory
tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
model_base = AutoModelWithLMHead.from_pretrained(model_path)

"""
get ip address
"""
def get_device_ip_address():
    result = {}
    if os.name == "nt":
        # Windows: resolve the local IP address from the hostname
        hostname = socket.gethostname()
        ip_address = socket.gethostbyname(hostname)
        result['ip_addr'] = ip_address
        result['host'] = hostname
        print(result)
        return result
    elif os.name == "posix":
        # Linux: open a UDP socket towards the default gateway to discover the local IP
        gw = os.popen("ip -4 route show default").read().split()
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect((gw[2], 0))
        ipaddr = s.getsockname()[0]
        s.close()
        host = socket.gethostname()
        result['ip_addr'] = ipaddr
        result['host'] = host
        print(result)
        return result
    else:
        result['id'] = os.name + " not supported yet."
        print(result)
        return result

"""
generate emotions of the sentences
"""
def get_emotion(text):
    input_ids = tokenizer.encode(text, return_tensors='pt')
    # max_length=2 -> the decoder emits the start token plus a single emotion label token
    output = model_base.generate(input_ids=input_ids, max_length=2)
    dec = [tokenizer.decode(ids) for ids in output]
    label = dec[0]
    gc.collect()
    return label

def generate_emotion(article):
    sen_list = article.split('\n')
    sen_list_temp = sen_list[0:]
    print(sen_list_temp)
    results_dict = []
    results = []
    for sen in sen_list_temp:
        if sen.strip():
            cur_result = get_emotion(sen)
            results.append(cur_result)
            results_dict.append(
                {
                    'sentence': sen,
                    'emotion': cur_result
                }
            )
    # build the table from the non-empty sentences only, so both columns have the same length
    result = {'Input': [r['sentence'] for r in results_dict], 'Detected Emotion': results}
    gc.collect()
    save_data_and_sendmail(results_dict, sen_list, results)
    return pd.DataFrame(result)

"""
Save generated details
"""
def save_data_and_sendmail(results_dict, sen_list, results):
    try:
        # append the generated results as one row of the log CSV, then push it to the dataset repo
        add_csv = [results_dict]
        with open(DATA_FILE, "a") as f:
            writer = csv.writer(f)
            writer.writerow(add_csv)
        commit_url = repo.push_to_hub()
        print("commit data :", commit_url)

        hostname = get_device_ip_address()
        print("hostname ", hostname)

        # notify the external logging endpoint
        url = 'https://pragnakalpdev35.pythonanywhere.com/hf_space_emotion_detection'
        # url = 'http://pragnakalpdev33.pythonanywhere.com/HF_space_question_generator'
        myobj = {
            'sen_list': sen_list,
            'gen_results': results,
            'ip_addr': hostname.get("ip_addr", ""),
            'host': hostname.get("host", "")
        }
        print("myobj ", myobj)
        requests.post(url, json=myobj)

    except Exception as e:
        return "Error while sending mail: " + str(e)

    return "Successfully saved data"

"""
UI design for demo using gradio app
"""
inputs = gr.Textbox(value=SENTENCES_VALUE, lines=10, label="Sentences", elem_id="inp_div")
outputs = [gr.Dataframe(row_count=(2, "dynamic"), col_count=(2, "fixed"), label="Here is the Result", headers=["Input", "Detected Emotion"])]

demo = gr.Interface(
    generate_emotion,
    inputs,
    outputs,
    title="Emotion Detection",
    description="Feel free to give your feedback",
    css=".gradio-container {background-color: lightgray} #inp_div {background-color: #FB3D5;}"
)
demo.launch()
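
# ---------------------------------------------------------------------------
# Quick local smoke test (a hedged sketch, not part of the original Space).
# demo.launch() above blocks until the server stops, so the call is left
# commented out. It feeds the default sentences straight into the pipeline
# and prints the same DataFrame the Gradio UI renders; note it would also
# push logs to the dataset repo and hit the remote endpoint, like the UI path.
#
# print(generate_emotion(SENTENCES_VALUE))
# ---------------------------------------------------------------------------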