import gradio as gr

from load_model import load_Auto
from load_push import all_files
from retriever import *
from retrieve_docs import *
from make_chain_model import make_chain_llm
from make_answer import *
from transformers import TextStreamer
# Build the RAG pipeline: load the LLM, index the documents under 'files',
# then wire the retriever and the model into a chain.
llm = load_Auto()
pinecone, bm25 = all_files('files')
retriever = retriever(pinecone, bm25)
rag_chain = make_chain_llm(retriever, llm)


def response(message, history):
    # `history` is supplied by gr.ChatInterface but is not used here:
    # each turn is answered independently by the RAG chain.
    return rag_chain.invoke(message)
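# Optional sketch (assumption, untested): if `rag_chain` is a LangChain runnable that
# supports `.stream()`, gr.ChatInterface can stream the reply by yielding partial
# strings from a generator instead of returning the full answer at once:
#
# def response(message, history):
#     partial = ""
#     for chunk in rag_chain.stream(message):
#         partial += chunk
#         yield partial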
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
TITLE = "FUT FUT Chatbot"

DESCRIPTION = """
Futfut, a chatbot that answers kindly using the polite Korean '해요' (haeyo) speech style.

A RAG pipeline with a chatbot feature.

Resources used to build this project:

* embedding model : https://huggingface.co/BM-K/KoSimCSE-roberta-multitask
* dataset : https://huggingface.co/datasets/Dongwookss/q_a_korean_futsal
* vector DB : PINECONE
* chatbot : https://huggingface.co/Dongwookss/small_fut_final
"""
# Example prompts shown in the UI. They are in Korean to match the Korean-language
# model; roughly: "Recommend a futsal court in Siheung", "Explain the rules of a
# futsal match", "Tell me how long a futsal match lasts".
Examples = [['시흥 풋살 구장 추천해줘'], ['풋살 경기 규칙 설명해줘'], ['풋살 경기 시간 알려줘']]
demo = gr.ChatInterface(
    fn=response,
    chatbot=gr.Chatbot(
        show_label=True,
        show_share_button=True,
        show_copy_button=True,
        likeable=True,
        layout="bubble",
        bubble_full_width=False,
    ),
    theme=gr.themes.Soft(),
    examples=Examples,
    title=TITLE,
    description=DESCRIPTION,
)

demo.launch(debug=True)