import os
import time

import gradio as gr
import openai
from langchain.chains.router import MultiRetrievalQAChain
from langchain.document_loaders import TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.text_splitter import MarkdownTextSplitter
from langchain.vectorstores import Chroma

css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""

title = """
<div style="text-align: center;max-width: 700px;">
<h1>Chat about Bulevar's Menu</h1>
</div>
"""

prompt_hints = """
<div style="text-align: center;max-width: 700px;">
<p style="text-align: left;">What is in the crab tostada?</p>
</div>
"""

REST_PERSIST_DIRECTORY = "chromadb_bul_details"
FOOD_GUIDE_PERSIST_DIRECTORY = "chromadb_food_guide"
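# These folders are created on the first run, when the freshly built Chroma
# stores are persisted via save_dir(); on later runs they already exist and
# persist() is skipped.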


def loading_pdf():
    return "Loading..."


def loading_database(open_ai_key):
    if open_ai_key:
        os.environ['OPENAI_API_KEY'] = open_ai_key
        openai.api_key = open_ai_key

        embeddings = OpenAIEmbeddings(openai_api_key=open_ai_key)

        # Short, hand-written facts about the restaurant itself.
        bulevar_restaurant_texts = [
            "Bulevar is open Sunday through Wednesday from 5-9pm, and Thursday through Saturday from 4-10pm. It is open for lunch on Friday from 11am-3pm.",
            "Bulevar is located in the Arboretum at 360 and Mopac, next to Eddie V's.",
            "Bulevar offers tasty Mexican cuisine with a laid-back take on fine dining.",
            "Bulevar is another restaurant created by Guy and Larry. Following the success of their ATX Cocina, Bulevar offers another unique dining experience with high-quality dishes."
        ]

        # Build the restaurant-details vector store; persist it only if it has
        # not already been written to disk.
        rest_store_exists = os.path.exists(REST_PERSIST_DIRECTORY)
        bulevar_details_store = Chroma.from_texts(
            bulevar_restaurant_texts, embeddings, persist_directory=REST_PERSIST_DIRECTORY
        )
        if not rest_store_exists:
            save_dir(bulevar_details_store)

        # Load the menu guide and split it along its Markdown structure.
        loader = TextLoader('raw_text/food_guide.md')
        documents = loader.load()
        text_splitter = MarkdownTextSplitter(chunk_size=1000, chunk_overlap=0)
        docs = text_splitter.split_documents(documents)

        food_guide_exists = os.path.exists(FOOD_GUIDE_PERSIST_DIRECTORY)
        docs_store = Chroma.from_documents(docs, embeddings, persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY)
        if not food_guide_exists:
            save_dir(docs_store)

        # One retriever per knowledge source; the router chain chooses between
        # them based on these descriptions.
        retriever_infos = [
            {
                "name": "Food Guide",
                "description": "Good for answering questions about the menu",
                "retriever": docs_store.as_retriever()
            },
            {
                "name": "Bulevar Restaurant Details",
                "description": "Good for answering questions about Bulevar's hours, and restaurant details such as its mission, history, and owners.",
                "retriever": bulevar_details_store.as_retriever()
            }
        ]

        global chain
        chain = MultiRetrievalQAChain.from_retrievers(
            OpenAI(temperature=0, openai_api_key=open_ai_key), retriever_infos, verbose=True
        )
        return "Ready"
    else:
        return "You forgot the OpenAI API key"


def save_dir(vectorstore):
    # Write the in-memory Chroma collection to its persist_directory on disk.
    vectorstore.persist()
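
# Note: a persisted store can later be reopened without re-embedding, e.g. with
#   Chroma(persist_directory=REST_PERSIST_DIRECTORY, embedding_function=embeddings)
# (an illustrative sketch; this app simply rebuilds the stores each time a key
# is submitted).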


def add_text(history, text):
    # Append the user's message with an empty bot slot and clear the textbox.
    history = history + [(text, None)]
    return history, ""


def bot(history):
    # Answer the latest user message, then reveal it character by character.
    response = infer(history[-1][0], history)
    history[-1][1] = ""
    for character in response:
        history[-1][1] += character
        time.sleep(0.05)
        yield history
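
# Gradio treats generator callbacks like bot() as streaming handlers: each
# yielded history re-renders the Chatbot, which produces the typing effect.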


def infer(question, history):
    # Each question is answered independently; the chat history is not passed
    # to the router chain.
    result = chain({"input": question})
    return result["result"]


with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        with gr.Column():
            with gr.Row():
                openai_key = gr.Textbox(label="OpenAI API key", type="password")
                submit_api_key = gr.Button("Submit")
            with gr.Row():
                langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)

        chatbot = gr.Chatbot([], elem_id="chatbot").style(height=350)
        question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter")
        submit_btn = gr.Button("Send Message")
        gr.HTML(prompt_hints)

    # Build the vector stores and the router chain once the API key is submitted.
    submit_api_key.click(loading_database, inputs=[openai_key], outputs=[langchain_status], queue=False)

    # Pressing Enter and clicking the button both append the message, then stream the answer.
    question.submit(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )
    submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )

demo.queue(concurrency_count=2, max_size=20).launch()
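
# To try it locally (assuming this file is saved as app.py): run `python app.py`,
# open the URL Gradio prints (http://127.0.0.1:7860 by default), paste an OpenAI
# API key, wait for the status to read "Ready", then ask about the menu.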