# Zigister / main.py
# Uploaded by RohanVashisht ("Upload 7 files", commit e721bc0, verified)
"""
Main script to run the LLM chatbot using the Zigistry framework
This script performs the following tasks:
1. Perform pre-requisites check
2. Configure LLM and embedding model
3. Data ingestion
4. Query handling
5. Launch the chatbot interface
"""
# --- Importing required libraries ---
from zigistry import constants
from llama_index.llms.openrouter import OpenRouter
from llama_index.core.llms import ChatMessage
from zigistry import pre_requisite
import gradio as gr
from llama_index.core import (
StorageContext,
load_index_from_storage,
VectorStoreIndex,
SimpleDirectoryReader,
ChatPromptTemplate,
Settings,
)
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
# -------------------------------------
# --- Perform pre-requisites and pre-requisite check ---
pre_requisite.performPreRequisites()
if not pre_requisite.CorrectlyCompletedPreRequisites():
    # `raise SystemExit` is equivalent to exit(1) but does not depend on
    # the `site`-module-provided `exit` builtin (absent under `python -S`).
    raise SystemExit(1)
# ------------------------------------------------------
# --- Configure LLM and embedding model ---
Settings.llm = OpenRouter(
    # NOTE(review): the llama-index OpenRouter constructor documents the
    # parameter as `model`, not `model_name` (which its pydantic model
    # would reject as an unknown field).
    model=constants.LLM,
    api_key=constants.OR_TOKEN,
    context_window=3000,
    max_tokens=512,
    temperature=constants.TEMPERATURE,
)
Settings.embed_model = HuggingFaceEmbedding(model_name=constants.EMBEDDER)
# -----------------------------------------
def data_ingestion():
    """
    Read every configured input file, embed its contents into a vector
    index, and persist that index to disk for later query handling.
    """
    loaded_docs = SimpleDirectoryReader(input_files=constants.FILES).load_data()
    vector_index = VectorStoreIndex.from_documents(loaded_docs)
    # Persist so handle_query() can reload the index without re-embedding.
    vector_index.storage_context.persist(persist_dir=constants.PERSIST_DIR)
def handle_query(query):
    """
    Answer a user query against the persisted vector index.

    The storage context, index, prompt template, and query engine are
    expensive to construct, so they are built once on the first call and
    cached on the function object; subsequent calls reuse the cached
    engine instead of reloading the index from disk per query.

    Args:
        query: The user's question as plain text.

    Returns:
        The LLM's answer text, or a fallback message when no `response`
        field can be found on the query result.
    """
    engine = getattr(handle_query, "_engine", None)
    if engine is None:
        # First call: rebuild the query engine from the index persisted
        # by data_ingestion().
        storage_context = StorageContext.from_defaults(persist_dir=constants.PERSIST_DIR)
        index = load_index_from_storage(storage_context)
        text_qa_template = ChatPromptTemplate.from_messages(constants.LLM_RESPONSE_FORMAT)
        engine = index.as_query_engine(text_qa_template=text_qa_template)
        handle_query._engine = engine
    answer = engine.query(query)
    # Query results normally expose `.response`; handle a dict defensively.
    if hasattr(answer, "response"):
        return answer.response
    if isinstance(answer, dict) and "response" in answer:
        return answer["response"]
    return "Sorry, I couldn't find an answer."
if __name__ == "__main__":
    # Build (or rebuild) and persist the vector index before serving queries.
    data_ingestion()

    # --- Launch the chatbot interface ---
    chatbot_ui = gr.Interface(
        fn=handle_query,
        inputs="text",
        outputs="text",
        title="LLM Chatbot",
        flagging_mode="never",
    )
    chatbot_ui.launch()
    # -------------------------------------