Maharshi Gor committed
Commit 4f5d1cb · Parent(s): 97fcd0c

Enhances model selection and logging in pipeline components; adds logprobs support and improves UI feedback for disabled sliders.

Files changed:
- src/app_configs.py +2 -0
- src/components/model_pipeline/model_pipeline.py +4 -2
- src/components/model_pipeline/state_manager.py +2 -1
- src/components/model_pipeline/tossup_pipeline.py +32 -7
- src/components/model_step/model_step.py +3 -3
- src/display/custom_css.py +4 -0
- src/display/formatting.py +4 -0
- src/workflows/structs.py +2 -2
src/app_configs.py
CHANGED
@@ -6,9 +6,11 @@ UNSELECTED_PIPELINE_NAME = "Select Pipeline to Import..."
 AVAILABLE_MODELS = {
     "OpenAI/gpt-4o": {
         "model": "gpt-4o-2024-11-20",
+        "logprobs": True,
     },
     "OpenAI/gpt-4o-mini": {
         "model": "gpt-4o-mini-2024-07-18",
+        "logprobs": True,
     },
     "OpenAI/gpt-3.5-turbo": {
         "model": "gpt-3.5-turbo-0125",
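The new `logprobs` flags mark which models expose token log-probabilities. Downstream code (see `update_prob_slider` in tossup_pipeline.py below) reads the flag with `.get("logprobs", False)`, so models without an entry are treated as unsupported. A minimal sketch of that lookup pattern, mirroring the config shape above; `supports_logprobs` is an illustrative helper, not a function from this repo:

```python
# Sketch of consuming the "logprobs" capability flag added above.
# AVAILABLE_MODELS mirrors src/app_configs.py; supports_logprobs is hypothetical.
AVAILABLE_MODELS = {
    "OpenAI/gpt-4o": {"model": "gpt-4o-2024-11-20", "logprobs": True},
    "OpenAI/gpt-3.5-turbo": {"model": "gpt-3.5-turbo-0125"},  # no flag -> unsupported
}


def supports_logprobs(full_model_name: str) -> bool:
    """True only when the model entry explicitly opts in to logprobs."""
    return AVAILABLE_MODELS.get(full_model_name, {}).get("logprobs", False)


assert supports_logprobs("OpenAI/gpt-4o")
assert not supports_logprobs("OpenAI/gpt-3.5-turbo")
assert not supports_logprobs("Unknown/model")
```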
src/components/model_pipeline/model_pipeline.py
CHANGED
@@ -74,6 +74,7 @@ class PipelineInterface:
         self.ui_state = make_state(ui_state)
         self.pipeline_state = make_state(PipelineState(workflow=workflow, ui_state=ui_state))
         self.variables_state = make_state(workflow.get_available_variables())
+        self.model_selection_state = make_state({})
 
         self.sm = PipelineStateManager()
         self.input_variables = workflow.inputs
@@ -107,7 +108,7 @@ class PipelineInterface:
         step_interface.on_model_step_change(
             self.sm.update_model_step_state,
             inputs=[self.pipeline_state, step_interface.model_step_state, step_interface.ui_state],
-            outputs=[self.pipeline_state, self.ui_state, self.variables_state],
+            outputs=[self.pipeline_state, self.ui_state, self.variables_state, self.model_selection_state],
         )
 
         step_interface.on_ui_change(
@@ -239,7 +240,7 @@ class PipelineInterface:
         @gr.render(inputs=[self.pipeline_state, self.ui_state], concurrency_limit=1, concurrency_id="render_steps")
         def render_steps(state: PipelineState, ui_state: PipelineUIState):
             """Render all steps in the pipeline"""
-            logger.info(f"
+            logger.info(f"Rerender triggered! Current UI State:{ui_state.model_dump()}")
             workflow = state.workflow
             components = []
 
@@ -262,6 +263,7 @@ class PipelineInterface:
             concurrency_id="render_output_fields",
         )
         def render_output_fields(available_variables, pipeline_state):
+            logger.info(f"Rerendering output panel: {available_variables} {pipeline_state.workflow}")
            self._render_output_panel(available_variables, pipeline_state)
 
         export_btn = gr.Button("Export Pipeline", elem_classes="export-button")
src/components/model_pipeline/state_manager.py
CHANGED
@@ -176,7 +176,8 @@ class PipelineStateManager:
         state.ui_state.steps[model_step.id] = ui_state.model_copy()
         state.ui_state = state.ui_state.model_copy()
         state.update_output_variables_mapping()
-
+        model_selections = {step_id: step.get_full_model_name() for step_id, step in state.workflow.steps.items()}
+        return state, state.ui_state, state.available_variables, model_selections
 
     def update_output_variables(self, state: PipelineState, target: str, produced_variable: str):
         if produced_variable == "Choose variable...":
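`update_model_step_state` now also returns a `{step_id: full_model_name}` mapping as a fourth output, which feeds the new `model_selection_state` in `PipelineInterface` so other components can react when any step's model changes. A rough sketch of how that mapping is built, using a stand-in `Step` dataclass rather than the repo's Pydantic `ModelStep`; the field values are illustrative:

```python
# Stand-in sketch of the model-selections mapping returned by update_model_step_state.
from dataclasses import dataclass


@dataclass
class Step:  # stand-in for workflows.structs.ModelStep
    provider: str
    model: str

    def get_full_model_name(self) -> str:
        return f"{self.provider}/{self.model}"


steps = {"A": Step("OpenAI", "gpt-4o"), "B": Step("OpenAI", "gpt-4o-mini")}

# Same comprehension shape as in the diff above.
model_selections = {step_id: step.get_full_model_name() for step_id, step in steps.items()}
assert model_selections == {"A": "OpenAI/gpt-4o", "B": "OpenAI/gpt-4o-mini"}
```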
src/components/model_pipeline/tossup_pipeline.py
CHANGED
@@ -1,8 +1,10 @@
 import gradio as gr
 import numpy as np
+from loguru import logger
 
 from app_configs import AVAILABLE_MODELS, UNSELECTED_VAR_NAME
 from components import commons
+from display.formatting import tiny_styled_warning
 from workflows.structs import Buzzer, TossupWorkflow
 
 from .model_pipeline import PipelineInterface, PipelineState, PipelineUIState
@@ -68,20 +70,36 @@ class TossupPipelineInterface(PipelineInterface):
     def update_prob_slider(self, state: TossupPipelineState, answer_var: str, tokens_prob: float | None):
         """Update the probability slider based on the answer variable."""
         if answer_var == UNSELECTED_VAR_NAME:
-            return
+            return (
+                state,
+                gr.update(interactive=True),
+                gr.update(value="AND", interactive=True),
+                gr.update(visible=False),
+            )
         step_id = answer_var.split(".")[0]
-        model_name = state.workflow.steps[step_id].
+        model_name = state.workflow.steps[step_id].get_full_model_name()
         model_config = AVAILABLE_MODELS[model_name]
         is_model_with_logprobs = model_config.get("logprobs", False)
         buzzer = state.workflow.buzzer
         tokens_prob_threshold = tokens_prob if is_model_with_logprobs else None
+        method = buzzer.method if is_model_with_logprobs else "AND"
         state = self.update_buzzer(
             state,
             confidence_threshold=buzzer.confidence_threshold,
-            method=
+            method=method,
             tokens_prob=tokens_prob_threshold,
         )
-        return
+        return (
+            state,
+            gr.update(interactive=is_model_with_logprobs),
+            gr.update(value=method, interactive=is_model_with_logprobs),
+            gr.update(
+                value=tiny_styled_warning(
+                    f"{model_name} does not support `logprobs`. The probability slider will be disabled."
+                ),
+                visible=not is_model_with_logprobs,
+            ),
+        )
 
     def _render_output_panel(self, available_variables: list[str], pipeline_state: TossupPipelineState):
         dropdowns = {}
@@ -136,6 +154,7 @@ class TossupPipelineInterface(PipelineInterface):
             step=0.001,
             elem_classes="slider-container",
         )
+        self.buzzer_warning_display = gr.HTML(visible=False)
 
         def update_choices(available_variables):
             """Update the choices for the dropdowns"""
@@ -168,8 +187,14 @@ class TossupPipelineInterface(PipelineInterface):
         # TODO: Do Add model step change triggers as well. (Model name change triggers)
         answer_dropdown = dropdowns["answer"]
         if answer_dropdown is not None:
-
-            self.
+            gr.on(
+                triggers=[answer_dropdown.input, self.model_selection_state.change],
+                fn=self.update_prob_slider,
                 inputs=[self.pipeline_state, answer_dropdown, self.prob_slider],
-            outputs=[
+                outputs=[
+                    self.pipeline_state,
+                    self.prob_slider,
+                    self.buzzer_method_dropdown,
+                    self.buzzer_warning_display,
+                ],
             )
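The `gr.on` call above fans one handler out over two triggers (an edit of the answer dropdown and any change to `model_selection_state`) and updates several components at once by returning one `gr.update` per entry in `outputs`. A self-contained sketch of that wiring pattern, assuming a recent Gradio 4.x release where `gr.on` and `gr.State.change` are available (as the diff implies); the component names and the `SUPPORTS_LOGPROBS` table are illustrative, not from the repo:

```python
# Minimal sketch of the multi-trigger gr.on wiring used above.
import gradio as gr

SUPPORTS_LOGPROBS = {"OpenAI/gpt-4o": True, "OpenAI/gpt-3.5-turbo": False}


def refresh_slider(model_name: str, prob: float):
    """Disable the slider and show a warning when the model lacks logprobs."""
    ok = SUPPORTS_LOGPROBS.get(model_name, False)
    warning = "" if ok else f"{model_name} does not support logprobs."
    return (
        gr.update(interactive=ok),
        gr.update(value=warning, visible=not ok),
    )


with gr.Blocks() as demo:
    model_state = gr.State("OpenAI/gpt-4o")
    model_dropdown = gr.Dropdown(choices=list(SUPPORTS_LOGPROBS), value="OpenAI/gpt-4o")
    prob_slider = gr.Slider(0.0, 1.0, value=0.5, step=0.001)
    warning_html = gr.HTML(visible=False)

    # Keep the state in sync with the dropdown so its .change event fires too.
    model_dropdown.input(lambda m: m, inputs=model_dropdown, outputs=model_state)

    gr.on(
        triggers=[model_dropdown.input, model_state.change],
        fn=refresh_slider,
        inputs=[model_dropdown, prob_slider],
        outputs=[prob_slider, warning_html],
    )

if __name__ == "__main__":
    demo.launch()
```

Returning the updates in the same order as the `outputs` list is what keeps the slider, method dropdown, and warning display in sync in the real code.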
src/components/model_step/model_step.py
CHANGED
@@ -304,7 +304,7 @@ class ModelStepComponent(FormComponent):
         )
 
         # Model and system prompt
-        self.model_selection.
+        self.model_selection.input(
             fn=self.sm.update_model_and_provider,
             inputs=[self.model_step_state, self.model_selection],
             outputs=[self.model_step_state],
@@ -442,11 +442,11 @@ class ModelStepComponent(FormComponent):
 
     def on_model_step_change(self, fn, inputs, outputs):
         """Set up an event listener for the model change event."""
-        self.model_step_state.change(fn, inputs, outputs)
+        return self.model_step_state.change(fn, inputs, outputs)
 
     def on_ui_change(self, fn, inputs, outputs):
         """Set up an event listener for the UI change event."""
-        self.ui_state.change(fn, inputs, outputs)
+        return self.ui_state.change(fn, inputs, outputs)
 
     def _update_state_and_label(self, model_step: ModelStep, name: str):
         """Update both the state and the accordion label."""
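`on_model_step_change` and `on_ui_change` now return the event listener created by `.change(...)` instead of discarding it; in Gradio the returned dependency can be chained with `.then(...)` or reused as a trigger elsewhere. A minimal illustration of why that return value is useful, using plain Gradio components rather than the repo's classes:

```python
# Why returning the listener matters: the object returned by .change() can be
# chained. Illustrative components only; not the repo's ModelStepComponent.
import gradio as gr

with gr.Blocks() as demo:
    value = gr.Number(label="Value", value=0)
    doubled = gr.Number(label="Doubled")
    log_box = gr.Textbox(label="Log")

    # Keep the listener instead of dropping it...
    change_event = value.change(lambda v: v * 2, inputs=value, outputs=doubled)
    # ...so follow-up work can be chained onto the same event.
    change_event.then(lambda v: f"value changed to {v}", inputs=value, outputs=log_box)

if __name__ == "__main__":
    demo.launch()
```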
src/display/custom_css.py
CHANGED
@@ -62,6 +62,10 @@ css_pipeline = """
     gap: var(--spacing-md) !important;
 }
 
+input[type=range][disabled] {
+    opacity: .3;
+}
+
 .json-node {
     /* On a light background (usually white), use darker and vivid colors */
     font-size: var(--text-sm) !important;
src/display/formatting.py
CHANGED
@@ -15,6 +15,10 @@ def styled_warning(warn):
     return f"<p style='color: orange; font-size: 20px; text-align: center;'>{warn}</p>"
 
 
+def tiny_styled_warning(warn):
+    return f"<p style='color: red; font-size: 12px; text-align: left;'>{warn}</p>"
+
+
 def styled_message(message):
     return f"<p style='color: green; font-size: 20px; text-align: center;'>{message}</p>"
 
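`tiny_styled_warning` follows the same pattern as the existing `styled_warning` and `styled_message` helpers, but smaller and left-aligned, suited to inline hints like the disabled-slider notice. A quick usage check; the function body is copied from the diff above, and the call-site note reflects the tossup_pipeline.py change:

```python
# Copied from the diff above; produces the small red warning markup that
# tossup_pipeline.py passes to gr.update(value=...) for buzzer_warning_display.
def tiny_styled_warning(warn):
    return f"<p style='color: red; font-size: 12px; text-align: left;'>{warn}</p>"


print(tiny_styled_warning("OpenAI/gpt-3.5-turbo does not support `logprobs`."))
# -> <p style='color: red; font-size: 12px; text-align: left;'>OpenAI/gpt-3.5-turbo does not support `logprobs`.</p>
```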
src/workflows/structs.py
CHANGED
@@ -113,8 +113,8 @@ class ModelStep(BaseModel):
     def fields(self, field_type: FieldType) -> list[InputField | OutputField]:
         return self.input_fields if field_type == "input" else self.output_fields
 
-    def get_full_model_name(self):
-        return f"{self.provider}
+    def get_full_model_name(self) -> str:
+        return f"{self.provider}/{self.model}"
 
     def get_produced_variables(self) -> list[str]:
         return [f"{self.id}.{field.name}" for field in self.output_fields if field.name]