jeffrey committed
Commit 55755d8 • 1 Parent(s): 2a58fa0
fix upstage api key check error
- app.py +3 -2
- src/util.py +3 -3
app.py CHANGED
@@ -10,8 +10,7 @@ from autorag.data.qa.schema import Raw
 from llama_index.llms.openai import OpenAI
 
 from src.create import default_create, fast_create, advanced_create
-from src.util import on_submit_openai_key
-
+from src.util import on_submit_openai_key, on_submit_llama_cloud_key, on_submit_upstage_key
 
 root_dir = os.path.dirname(os.path.realpath(__file__))
 FILE_DIR = os.path.join(root_dir, "file_cache")
@@ -233,6 +232,8 @@ with gr.Blocks(theme="earneleh/paris") as demo:
     # API Key visibility
     parse_choice.change(change_visible_status_api_key, inputs=[parse_choice],
                         outputs=[llama_cloud_api_key_row, upstage_api_key_row])
+    llama_key_textbox.submit(on_submit_llama_cloud_key, inputs=[llama_key_textbox], outputs=llama_key_status_box)
+    upstage_key_textbox.submit(on_submit_upstage_key, inputs=[upstage_key_textbox], outputs=upstage_key_status_box)
 
 
 demo.launch(share=False, debug=True, allowed_paths=[FILE_DIR, DATA_DIR])
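The two .submit hooks added to app.py wire each API-key textbox to its validator and route the returned status string into a status box. Below is a minimal, runnable sketch of that Gradio pattern; the component and handler names in it are illustrative placeholders, not the Space's own (the Space uses llama_key_textbox, upstage_key_textbox and the on_submit_* handlers from src/util.py).

# Minimal sketch of the .submit wiring this commit adds to app.py.
# Names here (check_key, key_box, status_box) are illustrative only.
import gradio as gr

def check_key(key: str) -> str:
    # Placeholder validator; the real handlers in src/util.py call the
    # provider's API with the submitted key and return a status message.
    return "Setting complete." if key.strip() else "Key looks empty."

with gr.Blocks() as demo:
    key_box = gr.Textbox(label="API Key", type="password")
    status_box = gr.Textbox(label="Status", interactive=False)
    # Pressing Enter in the key textbox runs the validator and writes
    # its return value into the status textbox.
    key_box.submit(check_key, inputs=[key_box], outputs=status_box)

if __name__ == "__main__":
    demo.launch()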
src/util.py CHANGED
@@ -52,10 +52,10 @@ def on_submit_upstage_key(upstage_key):
         response: ChatResponse = llm.chat(messages=[
             ChatMessage(role="system", content="You are a helpful assistant."),
             ChatMessage(role="user", content="Hi, how are you?")
-        ],
+        ], max_tokens=3)
 
-        assert isinstance(response.message, str)
-        assert bool(response.message)
+        assert isinstance(response.message.content, str)
+        assert bool(response.message.content)
         return "Setting complete."
     except:
         gr.Error("Upstage API key is invalid.", duration=3)
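Read with the hunk above, the whole validator plausibly looks like the sketch below. The function signature and lines 52-61 come from the diff; the imports and the Upstage client construction are assumptions (llama-index ships an Upstage LLM class, but this file's actual setup sits outside the hunk).

# Sketch of src/util.py's on_submit_upstage_key after this commit.
# Only the body shown in the hunk is confirmed; the imports and the
# llm construction are assumptions made for a runnable example.
import gradio as gr
from llama_index.core.llms import ChatMessage, ChatResponse
from llama_index.llms.upstage import Upstage  # assumed client class


def on_submit_upstage_key(upstage_key):
    try:
        llm = Upstage(api_key=upstage_key)  # assumed; not shown in the hunk
        # Cheap round-trip: an invalid key makes the provider raise here,
        # and max_tokens=3 keeps the probe request inexpensive.
        response: ChatResponse = llm.chat(messages=[
            ChatMessage(role="system", content="You are a helpful assistant."),
            ChatMessage(role="user", content="Hi, how are you?")
        ], max_tokens=3)

        # response.message is a ChatMessage object, so the generated text
        # lives in .content; that is what the fixed asserts check.
        assert isinstance(response.message.content, str)
        assert bool(response.message.content)
        return "Setting complete."
    except:
        gr.Error("Upstage API key is invalid.", duration=3)

The old asserts compared response.message itself, a ChatMessage object rather than a str, so even a valid key failed the check and fell through to the except branch; pointing the asserts at response.message.content is the substance of the fix.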