"""Streamlit app: ask questions over a prebuilt FAISS index using Google Gemini."""

import os

import streamlit as st
import google.generativeai as genai
from dotenv import load_dotenv
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
# In newer LangChain releases FAISS is imported from langchain_community.vectorstores.
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
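
# Rough dependency list implied by the imports above (usual PyPI package names;
# pin versions to match your environment):
#   pip install streamlit python-dotenv google-generativeai langchain langchain-google-genai faiss-cpu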

# Read GOOGLE_API_KEY from a local .env file and configure the Gemini client.
load_dotenv()
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))


def load_vector_store():
    """Load the prebuilt FAISS index from disk, embedding queries with Gemini."""
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    # The index is pickled on disk, so FAISS requires an explicit opt-in to
    # deserialization; only enable this for indexes you built yourself.
    vector_store = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
    return vector_store
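

# The app assumes a FAISS index already exists on disk in ./faiss_index. The original
# file does not show how that index was built; the helper below is a minimal,
# hypothetical ingestion sketch (PyPDF2 and RecursiveCharacterTextSplitter are
# assumptions, not part of the original app).
def build_vector_store(pdf_path, index_dir="faiss_index"):
    from PyPDF2 import PdfReader
    from langchain.text_splitter import RecursiveCharacterTextSplitter

    # Extract raw text from every page of the PDF.
    reader = PdfReader(pdf_path)
    text = "".join(page.extract_text() or "" for page in reader.pages)

    # Split into overlapping chunks so each fits comfortably in the prompt.
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    chunks = splitter.split_text(text)

    # Embed with the same Gemini embedding model used at query time, then persist
    # the index so load_vector_store() can pick it up.
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    vector_store = FAISS.from_texts(chunks, embedding=embeddings)
    vector_store.save_local(index_dir)
    return vector_store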


def get_conversational_chain():
    """Build a 'stuff' QA chain that answers only from the retrieved context."""
    prompt_template = """
    Answer the question as thoroughly as possible using only the provided context.
    If the answer is not in the provided context, just say "answer is not available in the context";
    do not make up an answer.

    Context:\n{context}\n
    Question:\n{question}\n

    Answer:
    """
    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    # "stuff" concatenates all retrieved chunks into a single prompt.
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
    return chain


def handle_user_query(user_question):
    """Retrieve chunks relevant to the question and run them through the QA chain."""
    # Reuse the index cached by main() in session state; fall back to loading from disk.
    vector_store = st.session_state.get("vector_store") or load_vector_store()
    docs = vector_store.similarity_search(user_question)

    chain = get_conversational_chain()
    response = chain(
        {"input_documents": docs, "question": user_question},
        return_only_outputs=True
    )

    return response.get("output_text", "No response generated.")
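

# load_qa_chain and calling a chain like a function are deprecated in recent LangChain
# releases. The variant below is a hypothetical sketch of the same query path using
# create_stuff_documents_chain; it assumes a LangChain version that provides it and is
# not something the original app defines or calls.
def handle_user_query_modern(user_question):
    from langchain.chains.combine_documents import create_stuff_documents_chain

    vector_store = st.session_state.get("vector_store") or load_vector_store()
    docs = vector_store.similarity_search(user_question)

    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    prompt = PromptTemplate(
        template="Answer only from the context.\n\nContext:\n{context}\n\nQuestion:\n{question}\n\nAnswer:",
        input_variables=["context", "question"],
    )
    # The stuff chain formats the retrieved documents into {context} and returns a string.
    chain = create_stuff_documents_chain(model, prompt)
    return chain.invoke({"context": docs, "question": user_question})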


def main():
    st.set_page_config(page_title="Chat with PDF")
    st.header("Ask about the general theory of relativity")

    # Load the FAISS index once per session instead of on every Streamlit rerun.
    if "vector_store" not in st.session_state:
        st.session_state.vector_store = load_vector_store()

    user_question = st.text_input("Ask a Question")

    if user_question:
        response = handle_user_query(user_question)
        st.write("Reply:", response)


if __name__ == "__main__":
    main()
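
# To run locally (assuming this file is saved as app.py, with a .env file next to it
# containing GOOGLE_API_KEY=<your key> and a prebuilt ./faiss_index directory):
#   streamlit run app.py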