### Import Section ###
import chainlit as cl
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.runnables.config import RunnableConfig
from utilities.all_utilities import process_file
from utilities.prompts import get_opening_content
################
# General code
################
load_dotenv()
openai_api_key = os.getenv("OPENAI_API_KEY")
# Language-switch action callbacks
@cl.action_callback("icelandic")
async def on_icelandic_action(action):
    cl.user_session.set("language", "icelandic")
    await cl.Message(content=f"Changing to {action.name}").send()
    # Optionally remove the action button from the chatbot user interface
    # await action.remove()


@cl.action_callback("english")
async def on_english_action(action):
    cl.user_session.set("language", "english")
    await cl.Message(content=f"Changing to {action.name}").send()
    # Optionally remove the action button from the chatbot user interface
    # await action.remove()
#############################################
### On Chat Start (Session Start) Section ###
#############################################
@cl.on_chat_start
async def on_chat_start():
    # Offer language-switch buttons alongside the opening message
    actions = [
        cl.Action(name="icelandic", value="icelandic", description="Switch to Icelandic"),
        cl.Action(name="english", value="english", description="Switch to English"),
    ]
    await cl.Message(content="Languages", actions=actions).send()
    await cl.Message(content=get_opening_content()).send()

    # Ask whether prompt caching should be used for this session
    prompt_cache_input = await cl.AskActionMessage(
        content="Do you want to use Prompt Cache?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    prompt_cache = prompt_cache_input.get("value")

    # Wait for the user to upload a PDF file
    files = None
    while not files:
        files = await cl.AskFileMessage(
            content="Please upload a .pdf file to begin processing!",
            accept=["application/pdf"],
            max_size_mb=20,
            timeout=180,
        ).send()
    file = files[0]

    msg = cl.Message(
        content=f"Processing `{file.name}`...", disable_human_feedback=True
    )
    await msg.send()

    # Build the RAG chain and retriever from the uploaded file
    response = process_file(file, prompt_cache)
    rag_chain = response["chain"]
    retriever = response["retriever"]

    msg.content = f"Processing `{file.name}` is complete."
    await msg.update()
    msg.content = f"You can now ask questions about `{file.name}`."
    await msg.update()

    # Keep the chain and retriever in the user session for use in on_message
    cl.user_session.set("chain", rag_chain)
    cl.user_session.set("retriever", retriever)
##########################
### On Message Section ###
##########################
@cl.on_message
async def main(message: cl.Message):
    # Ensure that message.content is not None or empty
    if not message.content:
        return

    # Pull the RAG chain and the currently selected language from the session
    chain = cl.user_session.get("chain")
    language = cl.user_session.get("language", "english")

    msg = cl.Message(content="")
    question = message.content

    # Stream the answer token by token as the chain produces it
    async for chunk in chain.astream(
        {"question": question, "language": language},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk.content)

    await msg.send()
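

# ---------------------------------------------------------------------------
# Reference only: process_file lives in utilities/all_utilities.py, which is
# not part of this file. The sketch below merely illustrates the contract the
# handlers above rely on -- a dict holding a streamable RAG chain under
# "chain" and its retriever under "retriever". Every concrete choice in it
# (PyMuPDFLoader, FAISS, OpenAIEmbeddings, the gpt-4o-mini model, file.path)
# is an assumption made for illustration, not the actual implementation.
# ---------------------------------------------------------------------------
def _process_file_sketch(file, prompt_cache):
    from langchain_community.document_loaders import PyMuPDFLoader
    from langchain_community.vectorstores import FAISS
    from langchain_core.prompts import ChatPromptTemplate
    from langchain_openai import OpenAIEmbeddings

    # Load and index the uploaded PDF. file.path is assumed here; older
    # Chainlit versions expose the raw bytes via file.content instead.
    docs = PyMuPDFLoader(file.path).load()
    retriever = FAISS.from_documents(docs, OpenAIEmbeddings()).as_retriever()

    prompt = ChatPromptTemplate.from_template(
        "Answer in {language} using only this context:\n{context}\n\nQuestion: {question}"
    )
    # prompt_cache is accepted but ignored in this sketch.
    llm = ChatOpenAI(model="gpt-4o-mini", streaming=True)

    # LCEL chain shaped to match on_message: it accepts
    # {"question", "language"} and streams chunks exposing .content.
    chain = (
        {
            "context": lambda x: "\n\n".join(
                doc.page_content for doc in retriever.invoke(x["question"])
            ),
            "question": lambda x: x["question"],
            "language": lambda x: x["language"],
        }
        | prompt
        | llm
    )
    return {"chain": chain, "retriever": retriever}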