"""Gradio chat front-end for the Netray NLP model.

Builds a Blocks UI (exposed as ``nlp``) with a chatbot, an input textbox and
a clear button, and streams model completions through ``llm.stream``.
"""

import gradio as gr

from helpers.utils import import_llm
import config

# Model handle, loaded lazily once while the UI is being constructed below.
llm = None

# Chat-format fragments wrapped around each turn before calling the model.
# NOTE(review): special tokens / markup may have been stripped from these
# strings during extraction — confirm against the model's expected template.
chat_template = {
    "user": "user\n{}\n",
    "assistant": "model\n{}\n",
    "generation_prompt": "model\n",
}

# Read version strings for display. Context managers close the handles
# (the original left both files open).
with open("versions.txt", "r") as version_file:
    api_version = version_file.read().strip()
with open("{}/versions.txt".format(config.MODEL_DIR), "r") as version_file:
    model_version = version_file.read().strip()

with gr.Blocks(title="Netray NLP") as nlp:
    # NOTE(review): the original HTML markup was lost in extraction; only the
    # text content is preserved here — restore the intended tags if known.
    gr.HTML(
        "\nNetray NLP\n\nAPI Version: {}\nModel Version: {}\n".format(
            api_version, model_version
        )
    )
    bot = gr.Chatbot(label="Netray NLP")
    msg = gr.Textbox(
        placeholder="Tanya sesuatu! (Tekan \"enter\" untuk mengirim)",
        label="",
        elem_id="inputTextBox",
    )
    clear = gr.Button('Clear', variant='stop')

    if llm is None:  # fixed: identity comparison with None, not '=='
        llm = import_llm(config.LLM_CONFIG)

    def user(user_message, history):
        """Append the user's turn to the history and clear the textbox.

        Returns ("", new_history) — the empty string resets the input box.
        """
        return "", history + [[user_message, None]]

    def predict(history):
        """Stream the model's answer for the latest user turn.

        Yields the history after each received token so the UI updates
        incrementally.
        """
        if len(history) == 1:
            # First turn: no prior conversation to include.
            user_input = config.PROMPT.format(history[0][0])
        else:
            # Build a plain-text transcript of all completed turns.
            # (join over a list instead of repeated '+=' concatenation)
            transcript_parts = []
            for past_question, past_answer in history[:-1]:
                if past_question is not None:
                    transcript_parts.append(f"Pertanyaan: {past_question}\n")
                if past_answer is not None:
                    transcript_parts.append(f"Jawaban: {past_answer}\n")
            latest_question = history[-1][0] if history[-1][0] is not None else ""
            user_input = config.PROMPT_HISTORY.format(
                "".join(transcript_parts), latest_question
            )

        # Common tail, hoisted out of both branches.
        prompt = chat_template['user'].format(user_input)
        prompt += chat_template['generation_prompt']

        history[-1][1] = ""
        for token in llm.stream(prompt):
            history[-1][1] += token
            yield history

    msg.submit(user, [msg, bot], [msg, bot], queue=False).then(
        predict, bot, bot
    )
    clear.click(lambda: None, None, bot, queue=False)