akashmishra358 committed
Commit 51d730a · verified · 1 Parent(s): fffa086

Update app.py

Files changed (1): app.py (+14, -8)
app.py CHANGED
@@ -26,12 +26,14 @@ def get_text_chunks(text):
     chunks = text_splitter.split_text(text)
     return chunks
 
+
 def get_vectorstore(text_chunks):
     metadatas = [{"source": f"{i}-pl"} for i in range(len(text_chunks))]
     embeddings = OpenAIEmbeddings()
     vectorstore = Chroma.from_texts(texts=text_chunks, embedding=embeddings)
     return vectorstore
 
+
 def get_conversation_chain(vectorstore):
     llm = ChatOpenAI()
 
@@ -44,6 +46,7 @@ def get_conversation_chain(vectorstore):
     )
     return conversation_chain
 
+
 def handle_userinput(user_question):
     response = st.session_state.conversation({'question': user_question})
     st.session_state.chat_history = response['chat_history']
@@ -54,35 +57,38 @@ def handle_userinput(user_question):
         else:
             st.markdown(("AI: "+message.content))
 
+
 def main():
-    st.title("PDF Question Answering")
-    if "conversation" not in st.session_state or st.session_state.conversation is None:
+    if "conversation" not in st.session_state:
         st.session_state.conversation = None
+    if "chat_history" not in st.session_state:
         st.session_state.chat_history = None
 
     if st.session_state.conversation is not None:
-        st.header("Ask questions from your PDF")
-        user_question = st.text_input("Ask a question about your document:")
+        st.header("Ask questions from your PDF:books:")
+        user_question = st.chat_input("Ask a question about your documents:")
         if user_question:
             handle_userinput(user_question)
 
     if st.session_state.conversation is None:
         st.header("Upload your PDF here")
-        pdf_doc = st.file_uploader("Browse your file here", type="pdf")
+        pdf_doc = st.file_uploader("Browse your file here",type="pdf")
         if pdf_doc is not None:
             with st.spinner("Processing"):
                 # get pdf text
                 raw_text = extract_text_from_pdf(pdf_doc)
-
+
                 # get the text chunks
                 text_chunks = get_text_chunks(raw_text)
-
+
                 # create vector store
                 vectorstore = get_vectorstore(text_chunks)
-
+
                 # create conversation chain
                 st.session_state.conversation = get_conversation_chain(
                     vectorstore)
 
+            st.rerun()
+
 if __name__ == '__main__':
     main()
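
The substantive change in this commit is the Streamlit control flow: each session-state key is now initialized behind its own "not in st.session_state" guard (so a rerun no longer wipes an existing conversation), the question box moves from st.text_input to st.chat_input, and st.rerun() fires after processing so the app flips from the upload view to the chat view without an extra user interaction. A minimal self-contained sketch of that gating pattern follows; the names in it (the "ready" key, the Process button) are illustrative and not taken from app.py.

import streamlit as st

def main():
    # guard: set the key only on the first run; later reruns keep its value
    if "ready" not in st.session_state:
        st.session_state.ready = False

    if st.session_state.ready:
        st.header("Chat view")
        question = st.chat_input("Ask something")
        if question:
            st.markdown(f"You asked: {question}")
    else:
        st.header("Upload view")
        if st.button("Process"):       # stand-in for the PDF-processing branch
            st.session_state.ready = True
            st.rerun()                 # re-render immediately in the chat view

if __name__ == "__main__":
    main()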
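The second hunk only shows the tail of get_conversation_chain. For context, a sketch of how such a chain is typically built with the classic LangChain API, assuming ConversationalRetrievalChain with ConversationBufferMemory (consistent with handle_userinput reading response['chat_history']); this is an assumption, not the repository's actual code.

from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

def get_conversation_chain(vectorstore):
    llm = ChatOpenAI()
    # buffer memory keeps prior turns and exposes them as response['chat_history']
    memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )
    return conversation_chain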
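The diff also calls extract_text_from_pdf without showing it. A plausible minimal implementation, assuming the pypdf package; the helper's real body in this repo is not visible here.

from pypdf import PdfReader

def extract_text_from_pdf(pdf_doc):
    # Streamlit's UploadedFile is file-like, so PdfReader can consume it directly
    reader = PdfReader(pdf_doc)
    # extract_text() may return None for image-only pages, hence the "or ''"
    return "".join(page.extract_text() or "" for page in reader.pages)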