Commit f4202f1 · pkarthik15 committed · 1 parent: 9cbd8e3

add app and requirements

Files changed:
- app.py (+75, -0)
- requirements.txt (+83, -0)
app.py
ADDED
@@ -0,0 +1,75 @@
import os
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain
import pickle
import gradio as gr


def upload_file(file, key):
    # Set the environment variable for the OpenAI API key
    os.environ["OPENAI_API_KEY"] = key

    # Load the PDF document
    loader = PyPDFLoader(file.name)
    documents = loader.load()

    # Split the document into overlapping chunks
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
    texts = text_splitter.split_documents(documents)

    # OpenAI embeddings
    embeddings = OpenAIEmbeddings()

    # Create the vector store to use as the index (index the split chunks, not the raw pages)
    db = FAISS.from_documents(texts, embeddings)

    # Persist the vector store so the chat handler can reload it per query
    with open("vectorstore.pkl", "wb") as f:
        pickle.dump(db, f)

    return file.name


with gr.Blocks() as demo:
    openai_key = gr.Textbox(label="OPENAI API KEY")
    file_output = gr.File(label="Please select a PDF file and wait for the document to be displayed here")
    upload_button = gr.UploadButton("Click to upload a PDF document", file_types=["pdf"], file_count="single")
    upload_button.upload(upload_file, inputs=[upload_button, openai_key], outputs=file_output)

    chatbot = gr.Chatbot(label="Chat")
    msg = gr.Textbox(label="Enter your query")
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the new question with an empty answer slot
        return "", history + [[user_message, None]]

    def bot(history):
        user_message = history[-1][0]

        # Reload the persisted vector store
        with open("vectorstore.pkl", "rb") as f:
            vectorstore = pickle.load(f)

        llm = OpenAI(temperature=0)
        qa = ConversationalRetrievalChain.from_llm(llm, vectorstore.as_retriever(), return_source_documents=True)

        # Pass previously completed turns as (question, answer) pairs
        hist = [(q, a) for q, a in history[:-1] if a is not None]

        result = qa({"question": user_message, "chat_history": hist})
        history[-1][1] = result["answer"]
        return history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)


demo.launch()
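For reference, the pickled vector store written by upload_file can also be queried outside the Gradio UI. The following is a minimal sketch that reuses only calls already present in app.py; it assumes upload_file has already written vectorstore.pkl, that OPENAI_API_KEY is set in the environment, and the sample question is purely illustrative.

import pickle
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain

# Assumes vectorstore.pkl was produced by upload_file() above
with open("vectorstore.pkl", "rb") as f:
    vectorstore = pickle.load(f)

qa = ConversationalRetrievalChain.from_llm(
    OpenAI(temperature=0),
    vectorstore.as_retriever(),
    return_source_documents=True,
)

# chat_history starts empty for a one-off question (the question text is illustrative)
result = qa({"question": "What is this document about?", "chat_history": []})
print(result["answer"])

Pickling the LangChain FAISS store is a pragmatic choice here; if the pinned langchain version supports them, FAISS.save_local / FAISS.load_local are a common alternative for persistence.
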
requirements.txt
ADDED
@@ -0,0 +1,83 @@
aiofiles==23.1.0
aiohttp==3.8.4
aiosignal==1.3.1
altair==4.2.2
anyio==3.6.2
async-timeout==4.0.2
attrs==23.1.0
cachetools==5.3.0
certifi==2022.12.7
charset-normalizer==3.1.0
click==8.1.3
colorama==0.4.6
contourpy==1.0.7
cycler==0.11.0
dataclasses-json==0.5.7
entrypoints==0.4
faiss-cpu==1.7.3
fastapi==0.95.1
ffmpy==0.3.0
filelock==3.11.0
fonttools==4.39.3
frozenlist==1.3.3
fsspec==2023.4.0
gptcache==0.1.12
gradio==3.27.0
gradio_client==0.1.3
greenlet==2.0.2
h11==0.14.0
httpcore==0.17.0
httpx==0.24.0
huggingface-hub==0.13.4
idna==3.4
importlib-resources==5.12.0
Jinja2==3.1.2
jsonschema==4.17.3
kiwisolver==1.4.4
langchain==0.0.141
linkify-it-py==2.0.0
markdown-it-py==2.2.0
MarkupSafe==2.1.2
marshmallow==3.19.0
marshmallow-enum==1.5.1
matplotlib==3.7.1
mdit-py-plugins==0.3.3
mdurl==0.1.2
multidict==6.0.4
mypy-extensions==1.0.0
numpy==1.24.2
openai==0.27.4
openapi-schema-pydantic==1.2.4
orjson==3.8.10
packaging==23.1
pandas==2.0.0
Pillow==9.5.0
pydantic==1.10.7
pydub==0.25.1
pyparsing==3.0.9
pypdf==3.8.0
pyrsistent==0.19.3
python-dateutil==2.8.2
python-multipart==0.0.6
pytz==2023.3
PyYAML==6.0
regex==2023.3.23
requests==2.28.2
semantic-version==2.10.0
six==1.16.0
sniffio==1.3.0
SQLAlchemy==1.4.47
starlette==0.26.1
tenacity==8.2.2
tiktoken==0.3.3
toolz==0.12.0
tqdm==4.65.0
typing-inspect==0.8.0
typing_extensions==4.5.0
tzdata==2023.3
uc-micro-py==1.0.1
urllib3==1.26.15
uvicorn==0.21.1
websockets==11.0.1
yarl==1.8.2
zipp==3.15.0
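To reproduce this Space locally, installing the pinned dependencies with "pip install -r requirements.txt" and then starting the app with "python app.py" should be sufficient (assuming a Python version compatible with these pins and a valid OpenAI API key entered in the UI).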