# NOTE(review): the three lines below were stray hosting-page text captured
# when this file was scraped; preserved as a comment so the module parses.
# Spaces:
# Runtime error
# Runtime error
# -*- coding:utf-8 -*-
import json
import logging
import os
import sys

import colorama
import gradio as gr

# Wildcard-import order preserved: later modules may intentionally override
# names exported by earlier ones.
from modules.utils import *
from modules.presets import *
from modules.overwrites import *
from modules.chat_func import *
from modules.openai_func import get_usage
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s",
)

# SECURITY: never commit an API key in source.  The key is supplied via the
# ``my_api_key`` environment variable, or read from ``api_key.txt`` below.
my_api_key = ""

# True when running inside the project's Docker image (set by the Dockerfile).
dockerflag = os.environ.get("dockerrun") == "yes"

authflag = False   # whether the UI requires a username/password
auth_list = []     # list of (username, password) pairs accepted by launch()

if not my_api_key:
    my_api_key = os.environ.get("my_api_key")
if dockerflag:
    # The Docker image sets the key to the literal string "empty" when the
    # user forgot to provide one.
    if my_api_key == "empty":
        logging.error("Please give a api key!")
        sys.exit(1)
    # Optional HTTP basic auth from environment variables.
    username = os.environ.get("USERNAME")
    password = os.environ.get("PASSWORD")
    if username is not None and password is not None:
        auth_list.append((username, password))
        authflag = True
else:
    # Outside Docker: fall back to a local api_key.txt (non-empty) if no key
    # was supplied through the environment.
    if (
        not my_api_key
        and os.path.exists("api_key.txt")
        and os.path.getsize("api_key.txt")
    ):
        with open("api_key.txt", "r") as f:
            my_api_key = f.read().strip()
    # Optional auth.json: {"<id>": {"username": ..., "password": ...}, ...}
    if os.path.exists("auth.json"):
        authflag = True
        with open("auth.json", "r", encoding="utf-8") as f:
            auth = json.load(f)
        for entry in auth:
            if auth[entry]["username"] and auth[entry]["password"]:
                auth_list.append((auth[entry]["username"], auth[entry]["password"]))
            else:
                logging.error("Please check the username and password in the auth.json file!")
                sys.exit(1)
# Monkey-patch third-party classes with the project's own implementations:
# chat messages go through the custom ``postprocess`` and PromptHelper uses
# the custom chunk compactor from modules.overwrites.
gr.Chatbot.postprocess = postprocess
PromptHelper.compact_text_chunks = compact_text_chunks

# Stylesheet applied to the Blocks UI constructed below.
with open("assets/custom.css", "r", encoding="utf-8") as css_file:
    customCSS = css_file.read()
# ---------------------------------------------------------------------------
# UI definition.  Everything inside this ``with`` block builds the Gradio
# interface, then wires components to the chat callbacks imported from
# ``modules``.  Statement order is load-bearing (components must exist before
# they are referenced by the event handlers at the bottom).
# ---------------------------------------------------------------------------
with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
    # Per-session state (one instance per connected browser session).
    history = gr.State([])                 # raw conversation messages
    token_count = gr.State([])             # per-message token counts
    promptTemplates = gr.State(load_template(get_template_names(plain=True)[0], mode=2))
    user_api_key = gr.State(my_api_key)    # key currently in use for this session
    user_question = gr.State("")           # question being processed
    outputing = gr.State(False)            # True while a reply is streaming
    topic = gr.State("Conversation history is not named")

    # ----- Header row: title / (unused middle column) / status ------------
    with gr.Row():
        with gr.Column(scale=1):
            # gr.HTML(title)
            gr.HTML('<h1>SamGPT</h1>')
        with gr.Column(scale=4):
            # gr.HTML('<center><a href="https://huggingface.co/spaces/JohnSmith9982/ChuanhuChatGPT?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a>Duplicate the Space and run securely with your OpenAI API Key</center>')
            pass
        with gr.Column(scale=4):
            status_display = gr.Markdown(get_geoip(), elem_id="status_display")

    # ----- Main row: chat area (left) and settings tabs (right) -----------
    with gr.Row().style(equal_height=True):
        with gr.Column(scale=5):
            with gr.Row():
                chatbot = gr.Chatbot(elem_id="chuanhu_chatbot").style(height="100%")
            with gr.Row():
                with gr.Column(scale=12):
                    user_input = gr.Textbox(
                        show_label=False, placeholder="Enter here"
                    ).style(container=False)
                with gr.Column(min_width=70, scale=1):
                    submitBtn = gr.Button("Send", variant="primary")
                    # Hidden until a reply starts streaming (see start_outputing).
                    cancelBtn = gr.Button("Cancel", variant="secondary", visible=False)
            with gr.Row():
                emptyBtn = gr.Button(
                    "🧹 New conversations",
                )
                retryBtn = gr.Button("🔄 Rebuild")
                delFirstBtn = gr.Button("🗑️ Delete the oldest conversation")
                delLastBtn = gr.Button("🗑️ Delete the most recent conversation")
                reduceTokenBtn = gr.Button("♻️ Summarize")
        with gr.Column():
            with gr.Column(min_width=50, scale=1):
                # ----- Tab 1: model / key / search settings -----------------
                with gr.Tab(label="ChatGPT"):
                    keyTxt = gr.Textbox(
                        show_label=True,
                        placeholder=f"OpenAI API-key...",
                        value=hide_middle_chars(my_api_key),
                        type="password",
                        visible=not HIDE_MY_KEY,
                        label="API-Key",
                    )
                    usageTxt = gr.Markdown("**Send Message** or **Submit Key** to show credit", elem_id="usage_display")
                    model_select_dropdown = gr.Dropdown(
                        label="Select the model", choices=MODELS, multiselect=False, value=MODELS[0]
                    )
                    use_streaming_checkbox = gr.Checkbox(
                        label="Transmit answers in real time", value=True, visible=enable_streaming_option
                    )
                    use_websearch_checkbox = gr.Checkbox(label="Use online search", value=False)
                    language_select_dropdown = gr.Dropdown(
                        label="Select the reply language (for the Search & Index feature.)",
                        choices=REPLY_LANGUAGES,
                        multiselect=False,
                        value=REPLY_LANGUAGES[0],
                    )
                    # NOTE(review): ``multiple=True`` is not a documented
                    # gr.Files kwarg in current Gradio (``file_count`` is) —
                    # confirm against the pinned gradio version.
                    index_files = gr.Files(label="Upload the index file", type="file", multiple=True)
                # ----- Tab 2: system prompt and prompt templates ------------
                with gr.Tab(label="Prompt"):
                    systemPromptTxt = gr.Textbox(
                        show_label=True,
                        placeholder=f"Enter here System Prompt...",
                        label="System prompt",
                        value=initial_prompt,
                        lines=10,
                    ).style(container=False)
                    with gr.Accordion(label="Load the Prompt template", open=True):
                        with gr.Column():
                            with gr.Row():
                                with gr.Column(scale=6):
                                    templateFileSelectDropdown = gr.Dropdown(
                                        label="Select the Prompt template collection file",
                                        choices=get_template_names(plain=True),
                                        multiselect=False,
                                        value=get_template_names(plain=True)[0],
                                    ).style(container=False)
                                with gr.Column(scale=1):
                                    templateRefreshBtn = gr.Button("🔄 flushed")
                            with gr.Row():
                                with gr.Column():
                                    templateSelectDropdown = gr.Dropdown(
                                        label="Load from the Prompt template",
                                        choices=load_template(
                                            get_template_names(plain=True)[0], mode=1
                                        ),
                                        multiselect=False,
                                        value=load_template(
                                            get_template_names(plain=True)[0], mode=1
                                        )[0],
                                    ).style(container=False)
                # ----- Tab 3: save / load conversation history --------------
                with gr.Tab(label="Save/Load"):
                    with gr.Accordion(label="Save/load conversation history", open=True):
                        with gr.Column():
                            with gr.Row():
                                with gr.Column(scale=6):
                                    historyFileSelectDropdown = gr.Dropdown(
                                        label="Load the conversation from the list",
                                        choices=get_history_names(plain=True),
                                        multiselect=False,
                                        value=get_history_names(plain=True)[0],
                                    )
                                with gr.Column(scale=1):
                                    historyRefreshBtn = gr.Button("🔄 flushed")
                            with gr.Row():
                                with gr.Column(scale=6):
                                    saveFileName = gr.Textbox(
                                        show_label=True,
                                        placeholder=f"Set the file name: The default is.json,Optional.md",
                                        label="Set the save file name",
                                        value="Conversation history",
                                    ).style(container=True)
                                with gr.Column(scale=1):
                                    saveHistoryBtn = gr.Button("💾 Save the conversation")
                                    exportMarkdownBtn = gr.Button("📝 Export as Markdown")
                                    gr.Markdown("By default, it is saved in the history folder")
                            with gr.Row():
                                with gr.Column():
                                    downloadFile = gr.File(interactive=True)
                # ----- Tab 4: advanced parameters and network settings ------
                with gr.Tab(label="Senior"):
                    gr.Markdown("# ⚠️ Be sure to change it with caution ⚠️\n\nIf it is not available, restore the default settings")
                    default_btn = gr.Button("🔙 Restore the default settings")
                    with gr.Accordion("parameter", open=False):
                        top_p = gr.Slider(
                            minimum=-0,
                            maximum=1.0,
                            value=1.0,
                            step=0.05,
                            interactive=True,
                            label="Top-p",
                        )
                        temperature = gr.Slider(
                            minimum=-0,
                            maximum=2.0,
                            value=1.0,
                            step=0.1,
                            interactive=True,
                            label="Temperature",
                        )
                    # Hidden by default (visible=False); toggled off in this build.
                    with gr.Accordion("Network settings", open=False, visible=False):
                        apiurlTxt = gr.Textbox(
                            show_label=True,
                            placeholder=f"Enter the API address here...",
                            label="API address",
                            value="https://api.openai.com/v1/chat/completions",
                            lines=2,
                        )
                        changeAPIURLBtn = gr.Button("🔄 Switch the API address")
                        proxyTxt = gr.Textbox(
                            show_label=True,
                            placeholder=f"Enter the proxy address here...",
                            label="Proxy address (Example:http://127.0.0.1:10809)",
                            value="",
                            lines=2,
                        )
                        changeProxyBtn = gr.Button("🔄 Set the proxy address")
    gr.Markdown(description)
    gr.HTML(footer.format(versions=versions_html()), elem_id="footer")

    # ----- Reusable keyword-argument bundles for the event wiring ----------
    chatgpt_predict_args = dict(
        fn=predict,
        inputs=[
            user_api_key,
            systemPromptTxt,
            history,
            user_question,
            chatbot,
            token_count,
            top_p,
            temperature,
            use_streaming_checkbox,
            model_select_dropdown,
            use_websearch_checkbox,
            index_files,
            language_select_dropdown,
        ],
        outputs=[chatbot, history, status_display, token_count],
        show_progress=True,
    )
    # Swap Send/Cancel button visibility while a reply is in flight.
    start_outputing_args = dict(
        fn=start_outputing,
        inputs=[],
        outputs=[submitBtn, cancelBtn],
        show_progress=True,
    )
    end_outputing_args = dict(
        fn=end_outputing, inputs=[], outputs=[submitBtn, cancelBtn]
    )
    reset_textbox_args = dict(
        fn=reset_textbox, inputs=[], outputs=[user_input]
    )
    # Move the textbox content into ``user_question`` state and clear the box.
    transfer_input_args = dict(
        fn=transfer_input, inputs=[user_input], outputs=[user_question, user_input, submitBtn, cancelBtn], show_progress=True
    )
    get_usage_args = dict(
        fn=get_usage, inputs=[user_api_key], outputs=[usageTxt], show_progress=False
    )

    # Chatbot: Enter key and Send button run the same transfer→predict→end chain.
    cancelBtn.click(cancel_outputing, [], [])
    user_input.submit(**transfer_input_args).then(**chatgpt_predict_args).then(**end_outputing_args)
    user_input.submit(**get_usage_args)
    submitBtn.click(**transfer_input_args).then(**chatgpt_predict_args).then(**end_outputing_args)
    submitBtn.click(**get_usage_args)
    emptyBtn.click(
        reset_state,
        outputs=[chatbot, history, token_count, status_display],
        show_progress=True,
    )
    emptyBtn.click(**reset_textbox_args)
    retryBtn.click(**start_outputing_args).then(
        retry,
        [
            user_api_key,
            systemPromptTxt,
            history,
            chatbot,
            token_count,
            top_p,
            temperature,
            use_streaming_checkbox,
            model_select_dropdown,
            language_select_dropdown,
        ],
        [chatbot, history, status_display, token_count],
        show_progress=True,
    ).then(**end_outputing_args)
    retryBtn.click(**get_usage_args)
    delFirstBtn.click(
        delete_first_conversation,
        [history, token_count],
        [history, token_count, status_display],
    )
    delLastBtn.click(
        delete_last_conversation,
        [chatbot, history, token_count],
        [chatbot, history, token_count, status_display],
        show_progress=True,
    )
    reduceTokenBtn.click(
        reduce_token_size,
        [
            user_api_key,
            systemPromptTxt,
            history,
            chatbot,
            token_count,
            top_p,
            temperature,
            # NOTE(review): this sum is evaluated once when the UI is built,
            # not on each click — confirm this is the intended behavior.
            gr.State(sum(token_count.value[-4:])),
            model_select_dropdown,
            language_select_dropdown,
        ],
        [chatbot, history, status_display, token_count],
        show_progress=True,
    )
    reduceTokenBtn.click(**get_usage_args)
    # ChatGPT: key changes immediately refresh the usage/credit display.
    keyTxt.change(submit_key, keyTxt, [user_api_key, status_display]).then(**get_usage_args)
    keyTxt.submit(**get_usage_args)
    # Template
    templateRefreshBtn.click(get_template_names, None, [templateFileSelectDropdown])
    templateFileSelectDropdown.change(
        load_template,
        [templateFileSelectDropdown],
        [promptTemplates, templateSelectDropdown],
        show_progress=True,
    )
    templateSelectDropdown.change(
        get_template_content,
        [promptTemplates, templateSelectDropdown, systemPromptTxt],
        [systemPromptTxt],
        show_progress=True,
    )
    # S&L (save & load conversation history)
    saveHistoryBtn.click(
        save_chat_history,
        [saveFileName, systemPromptTxt, history, chatbot],
        downloadFile,
        show_progress=True,
    )
    saveHistoryBtn.click(get_history_names, None, [historyFileSelectDropdown])
    exportMarkdownBtn.click(
        export_markdown,
        [saveFileName, systemPromptTxt, history, chatbot],
        downloadFile,
        show_progress=True,
    )
    historyRefreshBtn.click(get_history_names, None, [historyFileSelectDropdown])
    historyFileSelectDropdown.change(
        load_chat_history,
        [historyFileSelectDropdown, systemPromptTxt, history, chatbot],
        [saveFileName, systemPromptTxt, history, chatbot],
        show_progress=True,
    )
    downloadFile.change(
        load_chat_history,
        [downloadFile, systemPromptTxt, history, chatbot],
        [saveFileName, systemPromptTxt, history, chatbot],
    )
    # Advanced
    default_btn.click(
        reset_default, [], [apiurlTxt, proxyTxt, status_display], show_progress=True
    )
    changeAPIURLBtn.click(
        change_api_url,
        [apiurlTxt],
        [status_display],
        show_progress=True,
    )
    changeProxyBtn.click(
        change_proxy,
        [proxyTxt],
        [status_display],
        show_progress=True,
    )
# Console hint for whoever started the server (green via colorama).
logging.info(
    f"{colorama.Back.GREEN}\nTips: Visit http://localhost:7860 to view the interface{colorama.Style.RESET_ALL}"
)
# The local server starts by default, is reachable directly by IP, and no
# public share link is created unless launch() is told otherwise.
demo.title = "SamGPT"
if __name__ == "__main__":
    reload_javascript()
    # Build the launch options once instead of repeating four nearly identical
    # launch() calls; the kwargs below are exactly what each original branch
    # passed for its docker/auth combination.
    launch_kwargs = {"favicon_path": "./assets/favicon.ico"}
    if dockerflag:
        # Inside Docker: bind to all interfaces on the fixed container port.
        launch_kwargs.update(server_name="0.0.0.0", server_port=7860)
        if authflag:
            launch_kwargs["auth"] = auth_list
        else:
            launch_kwargs["share"] = False
    else:
        # Outside Docker: open the user's browser; no public share link.
        launch_kwargs.update(share=False, inbrowser=True)
        if authflag:
            launch_kwargs["auth"] = auth_list
    demo.queue(concurrency_count=CONCURRENT_COUNT).launch(**launch_kwargs)
    # Change share to True above to create a public share link.
    # demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", server_port=7860, share=False)  # custom port
    # demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", server_port=7860, auth=("username here", "password here"))  # with username and password
    # demo.queue(concurrency_count=CONCURRENT_COUNT).launch(auth=("username here", "password here"))  # suitable behind an Nginx reverse proxy