RAG-Chat / app.py
Nitish-py's picture
url validation and response updates
1fbcbcc
raw
history blame
3.7 kB
import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
def indexing(llm, path=None):
    """Build a vector index over a document and expose it as a query engine.

    Parameters:
        llm: the LLM instance used by the LlamaIndex service context.
        path: path to the document to index; defaults to "data.txt".

    Returns:
        The query engine built from the index. It is also stored in the
        chainlit user session under the key "engine".
    """
    if path is None:
        path = "data.txt"
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    print("loading done")
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    print("indexing")
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    print("all done")
    print(query_engine)
    cl.user_session.set("engine", query_engine)
    # Return the engine so call sites that assign the result of this
    # function (e.g. the per-PDF path in `main`) receive the engine
    # instead of None.
    return query_engine
def qa(sp, engine, message):
    """Answer a chat message through the query engine.

    The system prompt `sp` is prefixed (space-separated) to the text of
    the incoming chainlit message, and the combined question is sent to
    the engine. Returns the engine's response object.
    """
    question = f"{sp} {message.content}"
    return engine.query(question)
async def _init_llm_and_index(llm, done_message):
    """Show a progress message, index the default document, then replace
    the progress text with `done_message`."""
    msg = cl.Message(author="Beast", content="Initiaing LLM....")
    await msg.send()
    await cl.make_async(indexing)(llm)
    msg.content = done_message
    await msg.update()


@cl.on_chat_start
async def factory():
    """Chat-start handler.

    Asks the user for a MonsterAPI deployment URL and auth token, builds
    the MonsterLLM client, optionally collects a system prompt, and
    indexes the default document so questions can be answered.
    """
    url = await cl.AskUserMessage(author="Beast", content="Enter url").send()
    print(url)
    # str.find returns -1 when the marker is absent; without this guard the
    # slice below would produce a garbage base URL (everything up to index 13).
    index_ai = url['output'].find(".monsterapi.ai")
    if index_ai == -1:
        await cl.Message(
            author="Beast",
            content="That does not look like a monsterapi.ai deployment url. Please restart the chat and try again.",
        ).send()
        return
    url_ai = url['output'][:index_ai + len(".monsterapi.ai")]
    auth = await cl.AskUserMessage(author="Beast", content="Enter auth token").send()
    print(auth)
    model = 'deploy-llm'
    llm = MonsterLLM(
        model=model,
        base_url=url_ai,
        monster_api_key=auth['output'],
        temperature=0.75,
        context_window=1024,
    )
    cl.user_session.set("llm", llm)
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter system prompt?",
        actions=[
            # NOTE(review): the "yes" label previously contained mojibake
            # ("βœ…") — repaired to the intended ✅ emoji.
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await _init_llm_and_index(llm, "Noted. Go ahead as your questions!!")
        cl.user_session.set("sp", sp["output"])
    else:
        await _init_llm_and_index(llm, "Okay, then you can start asking your questions!!")
@cl.on_message
async def main(message: cl.Message):
    """Message handler.

    Without attachments, the question is answered with the session's
    current query engine. With attachments, only PDFs are accepted: the
    PDF is indexed first and the question is answered against it.
    """
    engine = cl.user_session.get("engine")
    llm = cl.user_session.get("llm")
    sp = cl.user_session.get("sp")
    if sp is None:
        # No system prompt was configured at chat start.
        sp = ""
    if not message.elements:
        msg = cl.Message(author="Beast", content="Generating Response...", disable_feedback=False)
        await msg.send()
        response = await cl.make_async(qa)(sp, engine, message)
        print(response)
        msg.content = str(response)
        await msg.update()
    else:
        pdf = None
        go = True
        for file in message.elements:
            if "pdf" in file.mime:
                pdf = file
            else:
                await cl.Message(author="Beast", content="We only support PDF for now").send()
                go = False
                break
        if go:
            msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
            await msg.send()
            # `indexing` stores the freshly built engine in the user session;
            # read it back from there rather than relying on the function's
            # return value (which was None here, crashing `qa` on PDF upload).
            await cl.make_async(indexing)(llm, pdf.path)
            query_engine = cl.user_session.get("engine")
            msg.content = f"`{pdf.name}` processed."
            await msg.update()
            msg = cl.Message(author="Beast", content="Generating Response...", disable_feedback=False)
            await msg.send()
            response = await cl.make_async(qa)(sp, query_engine, message)
            print(response)
            msg.content = str(response)
            await msg.update()