File size: 9,629 Bytes
c6ba723
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e55d6a5
c6ba723
 
e55d6a5
c6ba723
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c147947
c6ba723
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
# import gradio as gr
import gradio
# import lmdb
# import base64
# import io
# import random
# import time
import json
import copy
# import sqlite3
from urllib.parse import urljoin
import openai

from app_js import api_key__get_from_browser, api_key__save_to_browser, saved_prompts_refresh_btn__click_js, selected_saved_prompt_title__change_js, saved_prompts_delete_btn__click_js, saved_prompts_save_btn__click_js, copy_prompt__click_js, paste_prompt__click_js, chat_copy_history_btn__click_js, chat_copy_history_md_btn__click_js, api_key_refresh_btn__click_js, api_key_save_btn__click_js

from functions import sequential_chat_fn, make_history_file_fn, on_click_send_btn, clear_history, copy_history, update_saved_prompt_titles, save_prompt, load_saved_prompt

# Markdown shown in the "introduction" accordion at the top of the page:
# explains the tool's purpose, the bring-your-own-API-key requirement, and
# the data-logging disclosure for the public Hugging Face Space.
introduction = """<center><h2>ChatGPT Batch Tool</h2></center>

<center>Hello. This is a tool for sending messages to ChatGPT in bulk.</center>

<center>With this tool, you can plan and send multiple messages to ChatGPT at once.</center>

Please note:

1. In order to use this tool, you will need to provide your own API Key and assume any associated costs. We do not collect or store your API Key. You can obtain your API Key by visiting https://platform.openai.com/account/api-keys.
2. The space for this demo page is public. For research and code improvement purposes, we need to log the chat content sent through this page, meaning we can see your chat history with ChatGPT in the background. **<span style="color:#ff5656;">By continuing to use this tool on this page, you agree to allow us to view, use, and share your chat data.</span>** If you wish to avoid this, you can [make a copy of this tool to your own private space](https://huggingface.co/spaces/hugforziio/chat-gpt-batch?duplicate=true), which also eliminates waiting in a queue.
"""


# Custom CSS injected into gradio.Blocks:
# - widens table cell inputs
# - blurs the API-key textarea until focused (shoulder-surfing protection)
# - spaces <hr> separators in the chat-log markdown
# - caps the chat-log box height and makes it scrollable
css = """
.table-wrap .cell-wrap input {min-width:80%}
#api-key-textbox textarea {filter:blur(8px); transition: filter 0.25s}
#api-key-textbox textarea:focus {filter:none}
#chat-log-md hr {margin-top: 1rem; margin-bottom: 1rem;}
#chat-markdown-wrap-box {max-height:80vh; overflow: auto !important;}
"""
# Top-level UI definition. NOTE: component creation order inside the context
# managers determines the rendered layout, and the event wiring below
# (`_js=` client-side callbacks, `cancels=[start_event]`) relies on the
# exact statement order — do not reorder.
with gradio.Blocks(title="ChatGPT Batch Tool", css=css) as demo:

    with gradio.Accordion("introduction", open=True):
        gradio.Markdown(introduction)

    with gradio.Accordion("Basic settings", open=True):
        system_prompt_enabled = gradio.Checkbox(label='Enable System level Prompt', info='Whether to use the system level prompt for ChatGPT task description as "System"', value=True)
        # System prompt
        system_prompt = gradio.Textbox(label='System level Prompt', info='Description of the task for ChatGPT as "System"', value='You are a part-of-speech classifier. Users will send you a word and you should determine its part-of-speech, such as nouns, verbs, etc.!!Please note!! ⚠️Highest priority!!: You may only directly return the part-of-speech without any extra information. Do not explain why it is this part-of-speech, etc., otherwise the program used by the user will fail and cause serious losses to the user😱!!!')
        # User message template
        user_message_template = gradio.Textbox(label='User Message Template', info='Template of messages to be sent in bulk', value='Word: ```___```')
        with gradio.Row():
            # Replacement area in user message template
            user_message_template_mask = gradio.Textbox(label='Template Placeholder', info='The part that needs to be replaced in the message template, can be a regular expression', value='___')
            # Is the replacement area in the user message template a regex
            user_message_template_mask_is_regex = gradio.Checkbox(label='Placeholder is regex', info='Is the placeholder in the message template a regular expression?', value=False)
        # User message replacement area list text
        user_message_list_text = gradio.Textbox(label='User Message List', info='All messages to be sent', value='animals| trains| between| of| located| what are you doing')
        with gradio.Row():
            # User message replacement area list splitter
            user_message_list_text_splitter = gradio.Textbox(label='User Message Splitter', info='Splitter used to split user message list, such as comma (`,`), line feed (`\n`), or regular expressions', value='\\|\\s+')
            # Is the splitter for the user message replacement area list a regex
            user_message_list_text_splitter_is_regex = gradio.Checkbox(label='Splitter is regex', info='Is the splitter for the user message list a regular expression?', value=True)
        # Number of history records
        history_prompt_num = gradio.Slider(label="Number of History Records", info='How many previous history records to include when sending a message (for ChatGPT to understand the context)', value=0, minimum=0, maximum=12000)

        # load_config_from_browser = gradio.Button("🔄 Load Configuration from Browser")
        # save_config_to_browser = gradio.Button("💾 Save Configuration to Browser")
        # export_config_to_file = gradio.Button("📤 Export Configuration to File")

    # Advanced / optional request parameters
    with gradio.Accordion("More settings", open=False):
        # Base sleep between consecutive API requests (milliseconds)
        sleep_base = gradio.Number(label='sleep between each message (ms)', value=700)
        # Random jitter added on top of the base sleep (milliseconds)
        sleep_rand = gradio.Number(label='sleep float (ms)', value=200)
        # OpenAI chat-completion request parameters, forwarded to sequential_chat_fn
        prop_stream = gradio.Checkbox(label="use stream", value=True)
        prop_model = gradio.Textbox(label="model", value="gpt-3.5-turbo")
        prop_temperature = gradio.Slider(label="temperature", value=1, minimum=0, maximum=2)
        prop_top_p = gradio.Slider(label="top_p", value=1, minimum=0, maximum=1)
        prop_choices_num = gradio.Slider(label="choices num(n)", value=1, minimum=1, maximum=20)
        # -1 presumably means "let the API decide" — TODO confirm in sequential_chat_fn
        prop_max_tokens = gradio.Slider(label="max_tokens", value=-1, minimum=-1, maximum=4096)
        prop_presence_penalty = gradio.Slider(label="presence_penalty", value=0, minimum=-2, maximum=2)
        prop_frequency_penalty = gradio.Slider(label="frequency_penalty", value=0, minimum=-2, maximum=2)
        prop_logit_bias = gradio.Textbox(label="logit_bias", visible=False)
    pass

    # API key entry plus load/save buttons backed by client-side JS
    # (the `_js` callbacks run in the browser; keys are not stored server-side).
    token_text = gradio.Textbox(visible=False)
    with gradio.Row():
        with gradio.Column(scale=10, min_width=100):
            api_key_text = gradio.Textbox(label="Your API key", placeholder="sk-...", elem_id="api-key-textbox")
        with gradio.Column(scale=1, min_width=100):
            api_key_load_btn = gradio.Button("🔄 Load from browser storage")
            # fn=None: handled entirely by the client-side `_js` function
            api_key_load_btn.click(
                None,
                inputs=[],
                outputs=[api_key_text, token_text],
                _js=api_key__get_from_browser,
            )
        with gradio.Column(scale=1, min_width=100):
            api_key_save_btn = gradio.Button("💾 save to browser storage")
            api_key_save_btn.click(
                None,
                inputs=[api_key_text, token_text],
                outputs=[api_key_text, token_text],
                _js=api_key__save_to_browser,
            )
        pass
    pass

    # Start-execution button
    start_btn = gradio.Button(value='Run!')

    with gradio.Accordion(label="Chat log", elem_id='chat-markdown-wrap-box'):
        # Output area (hidden per-session state holding the chat history)
        history = gradio.State(value=[])
        # Output areas (markdown-rendered): stable = finished turns,
        # stream = the turn currently being streamed
        history_md_stable = gradio.Markdown(value="🙂")
        history_md_stream = gradio.Markdown(value="🤖")

    with gradio.Accordion("Status"):
        tips = gradio.Markdown(value="ready")

    # Abort-execution button (wired below via cancels=[start_event])
    stop_btn = gradio.Button(value='Stop!')

    with gradio.Accordion("Download", open=False):
        # gradio.Markdown("(Currently unable to download, possibly due to restrictions from Hugging Face. Will update later.)")
        make_file_btn = gradio.Button(value='Generate files')
        with gradio.Row(visible=False) as file_row:
            # Download area (JSON file)
            history_file_json = gradio.File(label='Download Json', interactive=False)
            # Download area (Markdown file)
            history_file_md = gradio.File(label='Download Markdown', interactive=False)
        pass
    pass


    # Export the accumulated history as downloadable JSON/Markdown files
    # and reveal the (initially hidden) download row.
    make_file_btn.click(
        fn=make_history_file_fn,
        inputs=[history],
        outputs=[history_file_json, history_file_md, file_row],
    )


    # Main run: stream the batch chat; the returned event handle is kept
    # so the stop button can cancel it.
    start_event = start_btn.click(
        fn=sequential_chat_fn,
        inputs=[
            history,

            system_prompt_enabled,
            system_prompt,
            user_message_template,
            user_message_template_mask,
            user_message_template_mask_is_regex,
            user_message_list_text,
            user_message_list_text_splitter,
            user_message_list_text_splitter_is_regex,
            history_prompt_num,

            api_key_text, token_text,

            sleep_base,
            sleep_rand,
            prop_stream,
            prop_model,
            prop_temperature,
            prop_top_p,
            prop_choices_num,
            prop_max_tokens,
            prop_presence_penalty,
            prop_frequency_penalty,
            prop_logit_bias,
        ],
        outputs=[
            history,
            history_md_stable,
            history_md_stream,
            tips,
            file_row,
        ],
    )
    # Stop: no server-side fn, just cancels the running start_event job.
    stop_btn.click(
        fn=None,
        inputs=[],
        outputs=[],
        cancels=[start_event],
    )


if __name__ == "__main__":
    # Enable the request queue (needed for streaming/cancellation), then serve.
    app = demo.queue(concurrency_count=200)
    app.launch()