abrar-adnan committed
Commit d0d585b
1 Parent(s): 6d1ef96

output fixed

Files changed (1)
  1. app.py +16 -9
app.py CHANGED
@@ -24,6 +24,17 @@ backends = [
 ]
 
 emotion_pipeline = pipeline("text-classification", model="cardiffnlp/twitter-roberta-base-emotion")
+sentiment_pipeline = pipeline("sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")
+
+model = load_learner("gaze-recognizer-v3.pkl")
+
+def analyze_emotion(text):
+    result = emotion_pipeline(text)
+    return result
+
+def analyze_sentiment(text):
+    result = sentiment_pipeline(text)
+    return result
 
 def getTranscription(path):
     # Insert Local Video File Path
@@ -49,13 +60,6 @@ def getTranscription(path):
     transcription = processor.batch_decode(predicted_ids, skip_special_tokens=True)
 
     return transcription[0]
-
-
-model = load_learner("gaze-recognizer-v3.pkl")
-
-def analyze_emotion(text):
-    result = emotion_pipeline(text)
-    return result
 
 def video_processing(video_file, encoded_video):
     angry = 0
@@ -82,7 +86,9 @@ def video_processing(video_file, encoded_video):
     print(transcription)
     text_emotion = analyze_emotion(transcription)
     print(text_emotion)
-
+    text_sentiment = analyze_sentiment(transcription)
+    print(text_sentiment)
+
     video_capture = cv2.VideoCapture(video_file)
     on_camera = 0
     off_camera = 0
@@ -189,7 +195,8 @@ def video_processing(video_file, encoded_video):
     print(f'total sad percentage = {sad}')
     print(f'total surprise percentage = {surprise}')
     print(f'total neutral percentage = {neutral}')
-    return str(gaze_percentage)
+    final_result = "Gaze = "+str(gaze_percentage)+"Text Emotion"+str(text_emotion)+"Text transcription"+str(transcription)+"Text sentiment"+str(text_sentiment)
+    return final_result
 
 
 demo = gr.Interface(fn=video_processing,
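
For context, a minimal standalone sketch of what the two text pipelines wired up in this commit return, assuming transformers is installed and the checkpoints can be downloaded; the labels and scores in the comments are illustrative examples, not output captured from this Space.

from transformers import pipeline

# Same checkpoints as app.py
emotion_pipeline = pipeline("text-classification", model="cardiffnlp/twitter-roberta-base-emotion")
sentiment_pipeline = pipeline("sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")

text = "I really enjoyed giving this talk"
print(emotion_pipeline(text))    # e.g. [{'label': 'joy', 'score': 0.97}]
print(sentiment_pipeline(text))  # e.g. [{'label': 'POSITIVE', 'score': 0.99}]

# After this commit, video_processing() folds these results, the transcription
# and the gaze percentage into a single string (final_result) and returns it
# to the Gradio interface instead of only the gaze percentage.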