Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,8 +1,11 @@
|
|
1 |
import gradio as gr
|
2 |
import PyPDF2
|
3 |
-
|
|
|
4 |
from langchain.vectorstores.faiss import FAISS
|
5 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
|
|
|
|
6 |
from langchain import OpenAI, VectorDBQA
|
7 |
|
8 |
import os
|
@@ -31,9 +34,11 @@ def pdf_to_text(pdf_file, query):
|
|
31 |
embeddings = OpenAIEmbeddings()
|
32 |
#vector store
|
33 |
vectorstore = FAISS.from_texts(texts, embeddings)
|
|
|
|
|
34 |
|
35 |
#inference
|
36 |
-
qa = VectorDBQA.from_chain_type(llm=
|
37 |
return qa.run(query)
|
38 |
|
39 |
|
|
|
1 |
import gradio as gr
|
2 |
import PyPDF2
|
3 |
+
#from langchain.embeddings.openai import OpenAIEmbeddings
|
4 |
+
from langchain.embeddings import HuggingFaceEmbeddings
|
5 |
from langchain.vectorstores.faiss import FAISS
|
6 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
7 |
+
from langchain import HuggingFaceHub
|
8 |
+
|
9 |
from langchain import OpenAI, VectorDBQA
|
10 |
|
11 |
import os
|
|
|
34 |
embeddings = OpenAIEmbeddings()
|
35 |
#vector store
|
36 |
vectorstore = FAISS.from_texts(texts, embeddings)
|
37 |
+
llm = HuggingFaceHub(repo_id="google/flan-t5-xl", model_kwargs={"temperature":0, "max_length":512})
|
38 |
+
|
39 |
|
40 |
#inference
|
41 |
+
qa = VectorDBQA.from_chain_type(llm=llm, chain_type="stuff", vectorstore=vectorstore)
|
42 |
return qa.run(query)
|
43 |
|
44 |
|