Spaces:
Paused
Paused
File size: 4,426 Bytes
6ccd417 dae67e9 69570eb 10c5e96 dae67e9 6ccd417 0533c1e dae67e9 6ccd417 10c5e96 6ccd417 94ea8ae dae67e9 94ea8ae dae67e9 0533c1e dae67e9 6ccd417 0533c1e e58f7cf 7ffda7a 0533c1e 94ea8ae 0533c1e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 |
import gradio as gr
from gradio.themes.utils import colors
from t5 import T5
from koalpaca import KoAlpaca
LOCAL_TEST = False
MODEL_STRS = ['T5', 'KoAlpaca']
MODELS = []
cur_index = 0
def prepare_theme():
    """Construct the custom dark emerald/gray Gradio theme for the demo UI.

    Returns:
        A ``gr.themes.Default`` instance with the app's color overrides applied.
    """
    # Color/size overrides applied on top of the default theme; the *_dark
    # variants mirror the light values so both modes look identical.
    overrides = dict(
        body_background_fill="*primary_800",
        body_background_fill_dark="*primary_800",
        block_background_fill="*primary_700",
        block_background_fill_dark="*primary_700",
        border_color_primary="*secondary_300",
        border_color_primary_dark="*secondary_300",
        block_border_width="3px",
        input_border_width="2px",
        input_background_fill="*primary_700",
        input_background_fill_dark="*primary_700",
        background_fill_secondary="*primary_700",
        background_fill_secondary_dark="*primary_700",
        body_text_color="white",
        body_text_color_dark="white",
        block_label_text_color="*secondary_300",
        block_label_text_color_dark="*secondary_300",
        block_label_background_fill="*primary_800",
        block_label_background_fill_dark="*primary_800",
        color_accent_soft="*primary_600",
        color_accent_soft_dark="*primary_600",
    )
    base = gr.themes.Default(
        primary_hue=colors.gray,
        secondary_hue=colors.emerald,
        neutral_hue=colors.emerald,
    )
    return base.set(**overrides)
def chat(message, chat_history):
    """Answer *message* with the currently selected model and grow the history.

    Returns:
        A pair ``("", chatbot)``: the empty string clears the input textbox,
        and the rebuilt Chatbot (labeled with the active model name) refreshes
        the conversation display.
    """
    reply = MODELS[cur_index].generate(message)
    chat_history.append((message, reply))
    refreshed = gr.Chatbot(
        chat_history,
        label=MODEL_STRS[cur_index],
        bubble_full_width=False,
    )
    return "", refreshed
def change_model_index(idx, chatbot):
    """Switch the active model to *idx* and relabel the chat widget.

    Updates the module-level ``cur_index`` so later ``chat`` calls route to
    the newly selected model; the chat history itself is left untouched.
    """
    global cur_index
    cur_index = idx
    return gr.Chatbot(
        chatbot,
        label=MODEL_STRS[cur_index],
        bubble_full_width=False,
    )
if __name__ == '__main__':
    theme = prepare_theme()

    # T5 is always loaded; the much heavier KoAlpaca model is skipped when
    # running a quick local test.
    MODELS.append(T5())
    if not LOCAL_TEST:
        MODELS.append(KoAlpaca())

    with gr.Blocks(theme=theme) as demo:
        # First tab: project introduction / README-style markdown.
        with gr.Tab("์๊ฐ"):
            gr.Markdown(
"""
# KOMUCHAT : Korean community-style relationship counseling chabot
KOMUChat: ์จ๋ผ์ธ ์ปค๋ฎค๋ํฐ ์คํ์ผ์ ์ฐ์ ์๋ด์ฑ๋ด
### ๊ฐ์
์ฌ๋๋ค์ ์ค์ ๋ํ๋ฅผ ๋ฐ์ํ๊ธฐ ์ํด ํ๊ตญ ์ปค๋ฎค๋ํฐ ์ํจ์ฝ๋ฆฌ์ + ์ธ์คํฐ์ฆ์์ ์์งํ ์ฝ 3๋ง๊ฑด์ ์ง๋ฌธ-๋ต๋ณ ๋ํ ๋ฐ์ดํฐ์์ผ๋ก ํ์ต์ํจ ์ฑ๋ด<br>
์ด๋ณ, ๋ฐ์ดํธ์ฝ์ค, ์ ๋ฌผ ๋ฑ ์ฐ์ ์ ๊ด๋ จ๋ ์ง๋ฌธ์ ๋ํด ์จ๋ผ์ธ ์ปค๋ฎค๋ํฐ์์ ์๋ดํ๋ ๊ฒ ๊ฐ์ด ์๋ตํฉ๋๋ค.<br>
<b>paust-t5-large</b>๋ชจ๋ธ(ํ๊ท ์๋ต์๊ฐ n์ด)๊ณผ <b>polyglot-ko-12.8b-safetensors</b> ๋ชจ๋ธ(ํ๊ท ์๋ต์๊ฐ n์ด)์ ํ์ธํ๋ํ์ฌ ์ ๊ณตํฉ๋๋ค.<br>
๋ฐ๋ชจ์์ ์ ํํ์ฌ ์ด์ฉํด๋ณด์ค ์ ์์ต๋๋ค.
### ์๋ต์์
| Question | Answer |
| --- | --- |
| ํค์ด์ก์๋๋ค | ํ๋ด ๋ ์ข์ ์ธ์ฐ ๋ํ๋ ๊ฑฐ์ผ |
| ๋๋ผ๋ง๋ง ์ฃผ๊ตฌ์ฅ์ฐฝ ๋ณด๋์ค | ๋ญ ๋ณด๊ณ ์์ผ์ ๊ฐ์ |
| ๊ฒฐํผ๊น์ง ์๊ฐํ๋ ์ํ์ผ์๋ก ๋ ๊น๊นํ๊ฒ ๋ฐ์ง๊ฒ ๋ผ? | ์๋ฌด๋๋ ๊ทธ๋ฐ ๊ฑฐ ๊ฐ์ |
| ISTJ ์ฌ์๊ฐ ๋จ์ํํ ๊ด์ฌ์์ผ๋ฉด ์ด๋ป๊ฒ ํด? | ์ฐ๋ฝ ์์ฃผ ํ๊ณ ์ทจํฅ ๋ง์ด ๋ฌผ์ด๋ณด๊ณ ์์ฃผ๋ ์๋์ด๋ ๊พธ์คํ ๋ง๋๋ ค ํจ |
| #์ฒญ์#๋ค์ ์ฐ์ ๋ ๊ฐ์๊ธฐ ์์ํ๋ค๊ณ ์๊ฐํด? | ๋๋ ์ง์ ์ ๊ธฐํ ๊ฒ ์ฐ์ ์ ๋ํ ๊ธฐ๋๊ฐ ๋จ์ด์ง๋ฉด ํ๊ณ ์๋๋ใใใ |
### ์ฌ์ฉ๊ธฐ์
<img src="https://img.shields.io/badge/Python-3776AB?style=for-the-badge&logo=Python&logoColor=white"><img src="https://img.shields.io/badge/PyTorch-EE4C2C?style=for-the-badge&logo=PyTorch&logoColor=white">
""")

        # Second tab: the live demo — model selector next to the chat area.
        with gr.Tab("๋ฐ๋ชจ"):
            with gr.Row():
                # type='index' makes the radio pass the selected option's
                # integer index, which chat()/change_model_index() use to
                # pick the model from MODELS.
                rd = gr.Radio(MODEL_STRS, value='T5', type='index',
                              label='Model Selection', show_label=True,
                              interactive=True)
                with gr.Column(scale=5):  # chatbot area
                    chatbot = gr.Chatbot(label="T5", bubble_full_width=False)
                    with gr.Row():
                        txt = gr.Textbox(show_label=False,
                                         placeholder='์ฐ์ ๊ด๋ จ ์ง๋ฌธ์ ์๋ ฅํ์ธ์!',
                                         container=False)
            # Enter in the textbox sends the message; picking a radio option
            # switches the active model and relabels the chat widget.
            txt.submit(chat, [txt, chatbot], [txt, chatbot])
            rd.select(change_model_index, [rd, chatbot], [chatbot])

    demo.launch(debug=True, share=True)