Files changed (1) hide show
  1. app.py +13 -8
app.py CHANGED
@@ -26,12 +26,14 @@ def get_text_chunks(text):
26
  chunks = text_splitter.split_text(text)
27
  return chunks
28
 
 
29
def get_vectorstore(text_chunks):
    """Embed pre-split text chunks and store them in a Chroma vector store.

    Args:
        text_chunks: List of text chunks produced by the text splitter.

    Returns:
        A Chroma vector store containing one embedded document per chunk.
    """
    # Tag each chunk with a synthetic source id so retrieval hits can be
    # traced back to the chunk they came from.
    metadatas = [{"source": f"{i}-pl"} for i in range(len(text_chunks))]
    embeddings = OpenAIEmbeddings()
    # Bug fix: `metadatas` was built but never handed to Chroma, so the
    # source tags were silently discarded. Pass it through explicitly.
    vectorstore = Chroma.from_texts(
        texts=text_chunks, embedding=embeddings, metadatas=metadatas
    )
    return vectorstore
34
 
 
35
  def get_conversation_chain(vectorstore):
36
  llm = ChatOpenAI()
37
 
@@ -44,6 +46,7 @@ def get_conversation_chain(vectorstore):
44
  )
45
  return conversation_chain
46
 
 
47
  def handle_userinput(user_question):
48
  response = st.session_state.conversation({'question': user_question})
49
  st.session_state.chat_history = response['chat_history']
@@ -54,36 +57,38 @@ def handle_userinput(user_question):
54
  else:
55
  st.markdown(("AI: "+message.content))
56
 
 
57
def main():
    """Streamlit entry point: upload a PDF, then ask questions about it."""
    st.title("PDF Question Answering")

    # Make sure both session keys exist before any branch reads them.
    for state_key in ("conversation", "chat_history"):
        if state_key not in st.session_state:
            st.session_state[state_key] = None

    if st.session_state.conversation is not None:
        # A conversation chain already exists — show the Q&A interface.
        st.header("Ask questions from your PDF")
        user_question = st.text_input("Ask a question about your document:")
        if user_question:
            handle_userinput(user_question)
    else:
        # No chain yet — ask the user for a PDF and build one from it.
        st.header("Upload your PDF here")
        pdf_doc = st.file_uploader("Browse your file here", type="pdf")
        if pdf_doc is not None:
            with st.spinner("Processing"):
                raw_text = extract_text_from_pdf(pdf_doc)
                text_chunks = get_text_chunks(raw_text)
                vectorstore = get_vectorstore(text_chunks)
                st.session_state.conversation = get_conversation_chain(
                    vectorstore)


if __name__ == '__main__':
    main()
 
26
  chunks = text_splitter.split_text(text)
27
  return chunks
28
 
29
+
30
def get_vectorstore(text_chunks):
    """Embed pre-split text chunks and store them in a Chroma vector store.

    Args:
        text_chunks: List of text chunks produced by the text splitter.

    Returns:
        A Chroma vector store containing one embedded document per chunk.
    """
    # Tag each chunk with a synthetic source id so retrieval hits can be
    # traced back to the chunk they came from.
    metadatas = [{"source": f"{i}-pl"} for i in range(len(text_chunks))]
    embeddings = OpenAIEmbeddings()
    # Bug fix: `metadatas` was built but never handed to Chroma, so the
    # source tags were silently discarded. Pass it through explicitly.
    vectorstore = Chroma.from_texts(
        texts=text_chunks, embedding=embeddings, metadatas=metadatas
    )
    return vectorstore
35
 
36
+
37
  def get_conversation_chain(vectorstore):
38
  llm = ChatOpenAI()
39
 
 
46
  )
47
  return conversation_chain
48
 
49
+
50
  def handle_userinput(user_question):
51
  response = st.session_state.conversation({'question': user_question})
52
  st.session_state.chat_history = response['chat_history']
 
57
  else:
58
  st.markdown(("AI: "+message.content))
59
 
60
+
61
def main():
    """Streamlit entry point: upload a PDF, then chat about its contents."""
    # Make sure both session keys exist before any branch reads them.
    for state_key in ("conversation", "chat_history"):
        if state_key not in st.session_state:
            st.session_state[state_key] = None

    if st.session_state.conversation is not None:
        # A conversation chain already exists — show the chat interface.
        st.header("Ask questions from your PDF:books:")
        user_question = st.chat_input("Ask a question about your documents:")
        if user_question:
            handle_userinput(user_question)
    else:
        # No chain yet — ask the user for a PDF and build one from it.
        st.header("Upload your PDF here")
        pdf_doc = st.file_uploader("Browse your file here",type="pdf")
        if pdf_doc is not None:
            with st.spinner("Processing"):
                raw_text = extract_text_from_pdf(pdf_doc)
                text_chunks = get_text_chunks(raw_text)
                vectorstore = get_vectorstore(text_chunks)
                st.session_state.conversation = get_conversation_chain(
                    vectorstore)

            # Re-run so the chat branch renders immediately after indexing.
            # NOTE(review): the pasted diff does not show st.rerun()'s exact
            # indent — placed inside `if pdf_doc is not None:`; confirm.
            st.rerun()


if __name__ == '__main__':
    main()