jhj0517 committed
Commit · 364597e
1 Parent(s): c14cab5
Add info parameters in Advanced Parameters
app.py
CHANGED
@@ -73,18 +73,28 @@ class App:
                     cb_timestamp = gr.Checkbox(value=True, label="Add a timestamp to the end of the filename",
                                                interactive=True)
                     with gr.Accordion("Advanced Parameters", open=False):
-                        nb_beam_size = gr.Number(label="Beam Size", value=1, precision=0, interactive=True)
-                        nb_log_prob_threshold = gr.Number(label="Log Probability Threshold", value=-1.0, interactive=True)
-                        nb_no_speech_threshold = gr.Number(label="No Speech Threshold", value=0.6, interactive=True)
+                        nb_beam_size = gr.Number(label="Beam Size", value=1, precision=0, interactive=True,
+                                                 info="Beam size to use for decoding.")
+                        nb_log_prob_threshold = gr.Number(label="Log Probability Threshold", value=-1.0, interactive=True,
+                                                          info="If the average log probability over sampled tokens is below this value, treat as failed.")
+                        nb_no_speech_threshold = gr.Number(label="No Speech Threshold", value=0.6, interactive=True,
+                                                           info="If the No Speech Probability is higher than this value AND the average log probability over sampled tokens is below 'Log Prob Threshold', consider the segment as silent.")
                         dd_compute_type = gr.Dropdown(label="Compute Type", choices=self.whisper_inf.available_compute_types,
-                                                      value=self.whisper_inf.current_compute_type, interactive=True)
-                        nb_best_of = gr.Number(label="Best Of", value=5, interactive=True)
-                        nb_patience = gr.Number(label="Patience", value=1, interactive=True)
+                                                      value=self.whisper_inf.current_compute_type, interactive=True,
+                                                      info="Select the type of computation to perform.")
+                        nb_best_of = gr.Number(label="Best Of", value=5, interactive=True,
+                                               info="Number of candidates when sampling with non-zero temperature.")
+                        nb_patience = gr.Number(label="Patience", value=1, interactive=True,
+                                                info="Beam search patience factor.")
                         cb_condition_on_previous_text = gr.Checkbox(label="Condition On Previous Text", value=True,
-                                                                    interactive=True)
-                        tb_initial_prompt = gr.Textbox(label="Initial Prompt", value=None, interactive=True)
-                        sd_temperature = gr.Slider(label="Temperature", value=0, step=0.01, maximum=1.0, interactive=True)
-                        nb_compression_ratio_threshold = gr.Number(label="Compression Ratio Threshold", value=2.4, interactive=True)
+                                                                    interactive=True,
+                                                                    info="Condition on previous text during decoding.")
+                        tb_initial_prompt = gr.Textbox(label="Initial Prompt", value=None, interactive=True,
+                                                       info="Initial prompt to use for decoding.")
+                        sd_temperature = gr.Slider(label="Temperature", value=0, step=0.01, maximum=1.0, interactive=True,
+                                                   info="Temperature for sampling. It can be a tuple of temperatures, which will be successively used upon failures according to either `compression_ratio_threshold` or `log_prob_threshold`.")
+                        nb_compression_ratio_threshold = gr.Number(label="Compression Ratio Threshold", value=2.4, interactive=True,
+                                                                   info="If the gzip compression ratio is above this value, treat as failed.")
                     with gr.Group(visible=isinstance(self.whisper_inf, FasterWhisperInference)):
                         nb_length_penalty = gr.Number(label="Length Penalty", value=1,
                                                       info="Exponential length penalty constant.")
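For context, the change relies on the `info` argument that Gradio form components (gr.Number, gr.Slider, gr.Dropdown, gr.Checkbox, gr.Textbox) accept to render helper text under the component's label. The snippet below is a minimal standalone sketch of that pattern, assuming a recent Gradio release; it is not the app's actual layout, just an illustration using two of the tooltips added in this commit.

import gradio as gr

# Minimal sketch: `info=` renders a short description beneath each control's label.
with gr.Blocks() as demo:
    with gr.Accordion("Advanced Parameters", open=False):
        nb_beam_size = gr.Number(label="Beam Size", value=1, precision=0, interactive=True,
                                 info="Beam size to use for decoding.")
        sd_temperature = gr.Slider(label="Temperature", value=0, step=0.01, maximum=1.0,
                                   interactive=True,
                                   info="Temperature for sampling.")

if __name__ == "__main__":
    demo.launch()

When the accordion is expanded, each description appears directly under its control, which is the whole point of the commit: the advanced options become self-documenting in the UI.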
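The tooltip wording mirrors the decoding options of faster-whisper's WhisperModel.transcribe(), which is where values such as Beam Size, Best Of, Patience, Temperature and the thresholds are ultimately consumed when FasterWhisperInference is active. The sketch below only illustrates that mapping; the model size and audio path are placeholder assumptions, and it is not the repository's inference code.

from faster_whisper import WhisperModel

# Illustration only (assumed model size and input file): the UI values map onto
# transcribe() keyword arguments with the defaults shown in the diff above.
model = WhisperModel("tiny")

segments, info = model.transcribe(
    "audio.wav",                      # placeholder input file
    beam_size=1,                      # "Beam Size"
    best_of=5,                        # "Best Of"
    patience=1,                       # "Patience"
    length_penalty=1,                 # "Length Penalty"
    temperature=0,                    # "Temperature"
    compression_ratio_threshold=2.4,  # "Compression Ratio Threshold"
    log_prob_threshold=-1.0,          # "Log Probability Threshold"
    no_speech_threshold=0.6,          # "No Speech Threshold"
    condition_on_previous_text=True,  # "Condition On Previous Text"
    initial_prompt=None,              # "Initial Prompt"
)
for segment in segments:
    print(segment.start, segment.end, segment.text)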