Update app.py
app.py CHANGED
@@ -2,8 +2,9 @@ import gradio as gr
 from gradio_client import Client
 from huggingface_hub import InferenceClient
 import random
+from models import models
 ss_client = Client("https://omnibus-html-image-current-tab.hf.space/")
-
+'''
 models=[
     "google/gemma-7b",
     "google/gemma-7b-it",
@@ -14,7 +15,21 @@ models=[
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "JunRyeol/jr_model",
 ]
-
+'''
+def test_models():
+    log_box=[]
+    for model in models:
+        try:
+            model_state= InferenceClient(model)
+            outp=client.text_generation("What is a cat")
+            log = {"Model":model,"Success":outp}
+            print(log)
+            log_box.append(log)
+        except Exception as e:
+            log = {"Model":model,"Error":e}
+            print(log)
+            log_box.append(log)
+    return log_box

 def format_prompt_default(message, history,cust_p):
     prompt = ""
@@ -193,6 +208,7 @@ with gr.Blocks() as app:
     with gr.Group():
         with gr.Row():
             with gr.Column(scale=3):
+
                 inp = gr.Textbox(label="Prompt")
                 sys_inp = gr.Textbox(label="System Prompt (optional)")
                 with gr.Accordion("Prompt Format",open=False):
@@ -204,6 +220,7 @@ with gr.Blocks() as app:
                 with gr.Group():
                     stop_btn=gr.Button("Stop")
                     clear_btn=gr.Button("Clear")
+                    test_btn=gr.Button("Test")
                 client_choice=gr.Dropdown(label="Models",type='index',choices=[c for c in models],value=models[0],interactive=True)
             with gr.Column(scale=1):
                 with gr.Group():
@@ -226,7 +243,8 @@ with gr.Blocks() as app:
                     wait_time=gr.Number(label="Wait Time",value=3000)
                     theme=gr.Radio(label="Theme", choices=["light","dark"],value="light")
                     chatblock=gr.Dropdown(label="Chatblocks",info="Choose specific blocks of chat",choices=[c for c in range(1,40)],multiselect=True)
-
+                    test_json=gr.JSON(label="Test Output")
+                    test_btn.click(test_models,None,test_json)

     client_choice.change(load_models,client_choice,[chat_b,custom_prompt,model_state])
     app.load(load_models,client_choice,[chat_b,custom_prompt,model_state])
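The new import, from models import models, pulls the model list in from a models.py module that this commit does not include. Presumably that file exports the same list the diff wraps in a triple-quoted string; a minimal sketch under that assumption (only the entries visible in the diff are listed):

# models.py, hypothetical sketch; this file is not part of the commit shown above.
# It presumably exports the model list that app.py previously defined inline
# (only the IDs visible in the diff are reproduced here).
models = [
    "google/gemma-7b",
    "google/gemma-7b-it",
    "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "JunRyeol/jr_model",
]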
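Two details of the added test_models() look like they would surface during testing: the loop creates an InferenceClient as model_state but then calls text_generation on client, a name that is not defined anywhere in the diff, so every model would be reported as an Error (a NameError) even when its endpoint works; and on failure it stores the raw exception object, which the gr.JSON "Test Output" component may not be able to serialize. A minimal corrected sketch, assuming the intent is to query each per-model client and log a string error:

def test_models():
    # Sketch only, not the committed code: uses the per-model client created in
    # the loop (the diff calls an undefined name, client) and stores str(e) so
    # the log stays JSON-serializable for the gr.JSON output component.
    log_box = []
    for model in models:
        try:
            test_client = InferenceClient(model)              # one client per model ID
            outp = test_client.text_generation("What is a cat")
            log = {"Model": model, "Success": outp}
        except Exception as e:
            log = {"Model": model, "Error": str(e)}
        print(log)
        log_box.append(log)
    return log_box

Wired up as in the diff, test_btn.click(test_models,None,test_json), the returned list renders directly in the JSON output box.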
|