tdecae committed on
Commit
f4d77f2
·
verified ·
1 Parent(s): 3e606db

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -3
app.py CHANGED
@@ -116,7 +116,7 @@
116
  import os
117
  import sys
118
  import requests
119
- from langchain.chains import ConversationalRetrievalChain
120
  from langchain.document_loaders import PyPDFLoader, Docx2txtLoader, TextLoader
121
  from langchain.text_splitter import CharacterTextSplitter
122
  from langchain.vectorstores import Chroma
@@ -223,10 +223,27 @@ prompt = PromptTemplate(
223
  # 🔗 QA chain with custom prompt
224
  qa_chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt)
225
 
226
- # 🔗 Conversational chain
227
- chain = ConversationalRetrievalChain.from_llm(
 
 
 
 
 
 
 
 
 
 
 
228
  llm=llm,
 
 
 
 
 
229
  retriever=vectorstore.as_retriever(search_kwargs={'k': 6}),
 
230
  combine_docs_chain=qa_chain,
231
  return_source_documents=True,
232
  verbose=False
@@ -255,3 +272,4 @@ with gr.Blocks() as demo:
255
  demo.launch(debug=True) # remove share=True if running in HF Spaces
256
 
257
 
 
 
116
  import os
117
  import sys
118
  import requests
119
+ from langchain.chains import ConversationalRetrievalChain, LLMChain
120
  from langchain.document_loaders import PyPDFLoader, Docx2txtLoader, TextLoader
121
  from langchain.text_splitter import CharacterTextSplitter
122
  from langchain.vectorstores import Chroma
 
223
  # 🔗 QA chain with custom prompt
224
  qa_chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt)
225
 
226
+ # 🔷 Question rephraser chain for follow-up questions → standalone
227
+ CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
228
+ """
229
+ Given the following conversation and a follow-up question, rephrase the follow-up question to be a standalone question.
230
+
231
+ Chat History:
232
+ {chat_history}
233
+ Follow Up Input: {question}
234
+ Standalone question:
235
+ """
236
+ )
237
+
238
+ question_generator = LLMChain(
239
  llm=llm,
240
+ prompt=CONDENSE_QUESTION_PROMPT
241
+ )
242
+
243
+ # 🔷 Finally: build the ConversationalRetrievalChain manually
244
+ chain = ConversationalRetrievalChain(
245
  retriever=vectorstore.as_retriever(search_kwargs={'k': 6}),
246
+ question_generator=question_generator,
247
  combine_docs_chain=qa_chain,
248
  return_source_documents=True,
249
  verbose=False
 
272
  demo.launch(debug=True) # remove share=True if running in HF Spaces
273
 
274
 
275
+