DmitryRyumin committed
Commit cac01bb · 1 Parent(s): 7a4a029
app.css CHANGED
@@ -1,7 +1,9 @@
-.noti_err {
+.noti_err,
+div.noti-results-false textarea {
   color: var(--color-accent);
 }
-.noti_true {
+.noti_true,
+div.noti-results-true textarea {
   color: #006900;
 }
 
@@ -38,3 +40,11 @@ div.emo-stats:hover label[data-testid="block-label"],
 div.sent-stats:hover label[data-testid="block-label"] {
   display: none;
 }
+
+div.noti-results textarea,
+div.noti-results-false textarea,
+div.noti-results-true textarea {
+  height: fit-content;
+  font-weight: bold;
+  font-size: 16px;
+}
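Note: the new selectors rely on Gradio attaching `elem_classes` to the component wrapper, so `div.noti-results-* textarea` reaches the Textbox's inner `<textarea>`. A minimal standalone sketch of that pairing (the message text is illustrative, not from the repo):

```python
import gradio as gr
from pathlib import Path

# Sketch: a read-only Textbox tagged with one of the classes styled in app.css.
# Gradio adds "noti-results-false" to the component's wrapping <div>, so the
# "div.noti-results-false textarea" rule colors the inner textarea.
with gr.Blocks(css=Path("app.css").read_text()) as demo:
    gr.Textbox(
        value="Upload or record video",
        container=False,
        interactive=False,
        elem_classes="noti-results-false",
    )

if __name__ == "__main__":
    demo.launch()
```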
app/event_handlers/clear.py CHANGED
@@ -9,20 +9,22 @@ import gradio as gr
 
 # Importing necessary components for the Gradio app
 from app.config import config_data
-from app.components import html_message
 
 
 def event_handler_clear() -> (
-    tuple[gr.Video, gr.Button, gr.Button, gr.HTML, gr.Plot, gr.Plot, gr.Plot, gr.Plot]
+    tuple[
+        gr.Video, gr.Button, gr.Button, gr.Textbox, gr.Plot, gr.Plot, gr.Plot, gr.Plot
+    ]
 ):
     return (
         gr.Video(value=None),
         gr.Button(interactive=False),
         gr.Button(interactive=False),
-        html_message(
-            message=config_data.InformationMessages_NOTI_RESULTS[0],
-            error=True,
-            visible=True,
+        gr.Textbox(
+            value=config_data.InformationMessages_NOTI_RESULTS[0],
+            info=None,
+            container=False,
+            elem_classes="noti-results-false",
         ),
         gr.Plot(value=None, visible=False),
         gr.Plot(value=None, visible=False),
app/event_handlers/event_handlers.py CHANGED
@@ -17,7 +17,7 @@ def setup_app_event_handlers(
     video,
     clear,
     submit,
-    noti_results,
+    text,
     waveform,
     faces,
     emotion_stats,
@@ -27,7 +27,7 @@
         triggers=[video.change, video.upload, video.stop_recording, video.clear],
         fn=event_handler_video,
         inputs=[video],
-        outputs=[clear, submit, noti_results],
+        outputs=[clear, submit, text],
         queue=True,
     )
 
@@ -35,7 +35,7 @@
         fn=event_handler_submit,
         inputs=[video],
         outputs=[
-            noti_results,
+            text,
             waveform,
             faces,
             emotion_stats,
@@ -51,7 +51,7 @@
             video,
             clear,
             submit,
-            noti_results,
+            text,
             waveform,
             faces,
             emotion_stats,
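Context for the rename: Gradio assigns a handler's returned values to its `outputs` list positionally, so `text` must occupy the same slot that `noti_results` did. A minimal sketch of that contract (component names here are illustrative):

```python
import gradio as gr


def on_video(video: str) -> tuple[gr.Button, gr.Button, gr.Textbox]:
    ok = bool(video)
    # Returned values map positionally onto outputs=[clear, submit, text].
    return (
        gr.Button(interactive=ok),
        gr.Button(interactive=ok),
        gr.Textbox(value="ready" if ok else "upload a video"),
    )


with gr.Blocks() as demo:
    video = gr.Video()
    clear = gr.Button("Clear", interactive=False)
    submit = gr.Button("Submit", interactive=False)
    text = gr.Textbox(container=False, interactive=False)
    video.change(fn=on_video, inputs=[video], outputs=[clear, submit, text])

if __name__ == "__main__":
    demo.launch()
```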
app/event_handlers/submit.py CHANGED
@@ -39,13 +39,12 @@ from app.data_init import (
     text_model,
 )
 from app.load_models import VideoFeatureExtractor
-from app.components import html_message
 
 
 @spaces.GPU
 def event_handler_submit(
     video: str,
-) -> tuple[gr.HTML, gr.Plot, gr.Plot, gr.Plot, gr.Plot]:
+) -> tuple[gr.Textbox, gr.Plot, gr.Plot, gr.Plot, gr.Plot]:
     if video:
         if video.split(".")[-1] == "webm":
             video = convert_webm_to_mp4(video)
@@ -167,10 +166,11 @@ def event_handler_submit(
     )
 
     return (
-        html_message(
-            message=config_data.InformationMessages_NOTI_RESULTS[1],
-            error=False,
-            visible=False,
+        gr.Textbox(
+            value=" ".join(total_text).strip(),
+            info=config_data.InformationMessages_REC_TEXT,
+            container=False,
+            elem_classes="noti-results",
         ),
         gr.Plot(value=plt_audio, visible=True),
         gr.Plot(value=plt_faces, visible=True),
app/event_handlers/video.py CHANGED
@@ -9,18 +9,18 @@ import gradio as gr
 
 # Importing necessary components for the Gradio app
 from app.config import config_data
-from app.components import html_message
 
 
-def event_handler_video(video: str) -> gr.HTML:
+def event_handler_video(video: str) -> tuple[gr.Button, gr.Button, gr.Textbox]:
     is_video_valid = bool(video)
 
     return (
         gr.Button(interactive=is_video_valid),
         gr.Button(interactive=is_video_valid),
-        html_message(
-            message=config_data.InformationMessages_NOTI_RESULTS[int(is_video_valid)],
-            error=not is_video_valid,
-            visible=True,
+        gr.Textbox(
+            value=config_data.InformationMessages_NOTI_RESULTS[int(is_video_valid)],
+            info=None,
+            container=False,
+            elem_classes="noti-results-" + str(is_video_valid).lower(),
        ),
     )
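The handler derives the CSS class from the Python boolean, so the two notification states line up with the `.noti-results-false` / `.noti-results-true` rules in app.css; a quick standalone check:

```python
# str(False).lower() -> "false", str(True).lower() -> "true",
# matching the "noti-results-false" / "noti-results-true" classes in app.css.
for is_video_valid in (False, True):
    print("noti-results-" + str(is_video_valid).lower())
# noti-results-false
# noti-results-true
```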
app/tabs.py CHANGED
@@ -10,7 +10,6 @@ import gradio as gr
 # Importing necessary components for the Gradio app
 from app.description import DESCRIPTION
 from app.config import config_data
-from app.components import html_message
 from app.requirements_app import read_requirements
 
 
@@ -65,13 +64,7 @@ def app_tab():
                 elem_classes="submit",
             )
 
-            gr.Examples(
-                [
-                    "videos/1.mp4",
-                    "videos/2.mp4",
-                ],
-                [video],
-            )
+            gr.Examples(config_data.StaticPaths_EXAMPLES, [video])
 
         with gr.Column(
             visible=True,
@@ -79,10 +72,23 @@ def app_tab():
             variant="default",
             elem_classes="results-container",
         ):
-            noti_results = html_message(
-                message=config_data.InformationMessages_NOTI_RESULTS[0],
-                error=True,
+            text = gr.Textbox(
+                value=config_data.InformationMessages_NOTI_RESULTS[0],
+                max_lines=10,
+                placeholder=None,
+                label=None,
+                info=None,
+                show_label=False,
+                container=False,
+                interactive=False,
                 visible=True,
+                autofocus=False,
+                autoscroll=True,
+                render=True,
+                type="text",
+                show_copy_button=False,
+                max_length=config_data.General_TEXT_MAX_LENGTH,
+                elem_classes="noti-results-false",
             )
 
             waveform = gr.Plot(
@@ -121,7 +127,7 @@ def app_tab():
         video,
         clear,
         submit,
-        noti_results,
+        text,
         waveform,
         faces,
         emotion_stats,
config.toml CHANGED
@@ -3,6 +3,7 @@ APP_VERSION = "0.0.1"
 CSS_PATH = "app.css"
 
 [General]
+TEXT_MAX_LENGTH = 1000
 SR = 16000
 START_TIME = 0
 WIN_MAX_LENGTH = 4
@@ -28,6 +29,7 @@ NOTI_RESULTS = [
     "Upload or record video",
     "Video uploaded, you can perform calculations",
 ]
+REC_TEXT = "Recognized text"
 
 [OtherMessages]
 CLEAR = "Clear"
@@ -59,6 +61,10 @@ EMO_SENT_TEXT_WEIGHTS = "emo_sent_text_weights.pth"
 EMO_SENT_VIDEO_WEIGHTS = "emo_sent_video_weights.pth"
 YOLOV8N_FACE = "yolov8n-face.pt"
 OPENAI_WHISPER = "openai/whisper-base"
+EXAMPLES = [
+    "videos/1.mp4",
+    "videos/2.mp4",
+]
 
 [Requirements]
 LIBRARY = "Library"
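Attribute names such as `config_data.General_TEXT_MAX_LENGTH` and `config_data.StaticPaths_EXAMPLES` suggest the loader in app/config.py flattens each `[Section]` and key into `Section_KEY` attributes; that module is not part of this commit, so the sketch below is only an assumption of how such a loader could look:

```python
import tomllib  # Python 3.11+; earlier versions can use the third-party "toml" package
from types import SimpleNamespace


def load_config(path: str = "config.toml") -> SimpleNamespace:
    # Assumption: attributes are named "<Section>_<KEY>", e.g. General_TEXT_MAX_LENGTH.
    with open(path, "rb") as f:
        raw = tomllib.load(f)
    flat = {}
    for section, table in raw.items():
        if isinstance(table, dict):
            for key, value in table.items():
                flat[f"{section}_{key}"] = value
        else:
            # Keep any top-level keys as-is.
            flat[section] = table
    return SimpleNamespace(**flat)


config_data = load_config()
print(config_data.General_TEXT_MAX_LENGTH)  # 1000, under the flattening assumption above
print(config_data.StaticPaths_EXAMPLES)     # ['videos/1.mp4', 'videos/2.mp4']
```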