Commit 74dc293
Parent(s): e535042
Richard committed: Minor fixes + remove generate buttons for now

Files changed:
- dialogs/prompt_variables.py +0 -11
- llm.py +2 -1
- main.py +22 -16
- requirements.txt +1 -1
dialogs/prompt_variables.py
CHANGED
@@ -17,17 +17,6 @@ def prompt_variables():
   if not state.prompt_variables:
     me.text("No variables defined in prompt.", style=me.Style(width=DIALOG_INPUT_WIDTH))
   else:
-    with me.box(
-      style=me.Style(display="flex", justify_content="end", margin=me.Margin(bottom=15))
-    ):
-      mex.button(
-        "Generate",
-        on_click=on_click_generate_variables,
-        style=me.Style(
-          background=me.theme_var("secondary-container"),
-          color=me.theme_var("on-secondary-container"),
-        ),
-      )
     variable_names = set(parse_variables(state.prompt))
     with me.box(style=me.Style(display="flex", flex_direction="column")):
       for name, value in state.prompt_variables.items():
llm.py
CHANGED
@@ -1,3 +1,4 @@
+from collections.abc import Collection
 import json
 import os
 
@@ -60,7 +61,7 @@ def generate_prompt(task_description: str, model_name: str, temperature: float)
 
 
 def generate_variables(
-  prompt: str, variable_names:
+  prompt: str, variable_names: Collection[str], model_name: str, temperature: float
 ) -> dict[str, str]:
   model = _make_model(model_name, temperature=temperature)
   output = (
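With the widened signature, callers of generate_variables now pass the variable names plus the model settings explicitly. A minimal call-site sketch, reusing the parse_variables helper and state fields visible in dialogs/prompt_variables.py; the model name and temperature below are assumptions for illustration, not taken from this commit:

  variable_names = set(parse_variables(state.prompt))
  # Hypothetical invocation; "gemini-1.5-flash" is an assumed model name.
  values = generate_variables(
    state.prompt,
    variable_names,
    model_name="gemini-1.5-flash",
    temperature=1.0,
  )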
main.py
CHANGED
@@ -94,10 +94,13 @@ def app():
   if state.mode == "Prompt":
     # Render prompt creation page
     with me.box(
-      style=me.Style(padding=me.Padding(left=15, top=15, bottom=15), overflow_y="scroll")
+      style=me.Style(padding=me.Padding(left=15, top=15, bottom=15, right=2), overflow_y="scroll")
     ):
       with me.accordion():
-        with me.expansion_panel(
+        with me.expansion_panel(
+          title="System Instructions",
+          style=me.Style(background=me.theme_var("surface-container-lowest")),
+        ):
           me.native_textarea(
             autosize=True,
             min_rows=2,
@@ -108,7 +111,11 @@ def app():
             key="system_instructions",
           )
 
-        with me.expansion_panel(
+        with me.expansion_panel(
+          title="Prompt",
+          expanded=True,
+          style=me.Style(background=me.theme_var("surface-container-lowest")),
+        ):
           me.native_textarea(
             autosize=True,
             min_rows=2,
@@ -120,7 +127,12 @@ def app():
           )
 
       with me.box(
-        style=me.Style(
+        style=me.Style(
+          align_items="center",
+          display="flex",
+          justify_content="space-between",
+          margin=me.Margin(top=15),
+        )
       ):
         with me.content_button(
           type="flat",
@@ -130,25 +142,19 @@ def app():
         ):
           with me.tooltip(message="Run prompt"):
             me.icon("play_arrow")
-        mex.button(
-          "Generate prompt",
-          disabled=bool(state.prompt),
-          style=me.Style(
-            background=me.theme_var("secondary-container"),
-            color=me.theme_var("on-secondary-container"),
-          ),
-          on_click=handlers.on_open_dialog,
-          key="dialog_show_generate_prompt",
-        )
 
     with me.box(style=me.Style(padding=me.Padding.all(15), overflow_y="scroll")):
       if state.response:
-        with me.card(
+        with me.card(
+          appearance="raised", style=me.Style(background=me.theme_var("surface-container-lowest"))
+        ):
           me.card_header(title="Response")
           with me.card_content():
             mex.markdown(state.response, has_copy_to_clipboard=True)
       else:
-        with me.card(
+        with me.card(
+          appearance="raised", style=me.Style(background=me.theme_var("surface-container-lowest"))
+        ):
           me.card_header(title="Prompt Tuner Instructions")
           with me.card_content():
             mex.markdown(_INSTRUCTIONS, has_copy_to_clipboard=True)
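Taken together, the first two main.py hunks leave the prompt page's accordion shaped roughly as below (a condensed sketch of the post-commit structure with the textarea arguments abbreviated; the key="prompt" value is a guess, only key="system_instructions" appears in the diff):

  with me.accordion():
    with me.expansion_panel(
      title="System Instructions",
      style=me.Style(background=me.theme_var("surface-container-lowest")),
    ):
      me.native_textarea(autosize=True, min_rows=2, key="system_instructions")
    with me.expansion_panel(
      title="Prompt",
      expanded=True,
      style=me.Style(background=me.theme_var("surface-container-lowest")),
    ):
      me.native_textarea(autosize=True, min_rows=2, key="prompt")  # key assumed, not shown in the diff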
requirements.txt
CHANGED
@@ -1,3 +1,3 @@
 gunicorn
-mesop==0.
+mesop==0.13.0
 google-generativeai