Commit 6378e8d
Parent(s): 5789992

clean up
app.py CHANGED

@@ -12,7 +12,6 @@ from langchain.chains import RetrievalQA, LLMChain
 from langchain.prompts import PromptTemplate
 
 
-# embedding_file ="year_public_store_openai.pkl"
 embedding_file = "subtitle_year_faiss_openai.pkl"
 with open(embedding_file, 'rb') as f:
     VectorStore = pickle.load(f)
@@ -55,7 +54,7 @@ def slow_echo(usr_message, chat_history):
 
     answer_chain = RetrievalQA.from_chain_type(
         chat_model,
-        retriever=VectorStore.as_retriever(search_type="similarity"
+        retriever=VectorStore.as_retriever(search_type="similarity"),
         memory = memory,
         chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
         return_source_documents=True
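For context, the fix closes the as_retriever(...) call and adds the trailing comma before the remaining keyword arguments of RetrievalQA.from_chain_type. Below is a minimal sketch of how the corrected chain could be wired end to end; the chat model, prompt text, memory configuration, and example query are assumptions for illustration and are not part of this commit.

# Minimal, self-contained sketch of the corrected chain setup.
# Assumptions (not in the diff): ChatOpenAI as chat_model, the prompt text,
# and the ConversationBufferMemory configuration.
import pickle

from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

# Load the pre-built FAISS vector store from disk, as app.py does.
embedding_file = "subtitle_year_faiss_openai.pkl"
with open(embedding_file, "rb") as f:
    VectorStore = pickle.load(f)

chat_model = ChatOpenAI(temperature=0)  # assumed model; not shown in the diff

# Assumed prompt; the real QA_CHAIN_PROMPT is defined elsewhere in app.py.
QA_CHAIN_PROMPT = PromptTemplate.from_template(
    "Use the following context to answer the question.\n"
    "{context}\n\nQuestion: {question}\nAnswer:"
)

# output_key tells the memory which output to store, since
# return_source_documents=True makes the chain return multiple outputs.
memory = ConversationBufferMemory(memory_key="chat_history", output_key="result")

# The corrected call: as_retriever(...) is properly closed, so the remaining
# keyword arguments parse as arguments to from_chain_type.
answer_chain = RetrievalQA.from_chain_type(
    chat_model,
    retriever=VectorStore.as_retriever(search_type="similarity"),
    memory=memory,
    chain_type_kwargs={"prompt": QA_CHAIN_PROMPT},
    return_source_documents=True,
)

# Illustrative query against the subtitle/year index.
result = answer_chain({"query": "What happened in 1995?"})
print(result["result"])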