Upload gradio app (manual upload)
- .gitignore +22 -0
- Untitled.ipynb +502 -0
- app.py +157 -0
- build_database.py +103 -0
- requirements.txt +4 -0
- steps.md +14 -0
.gitignore
ADDED
@@ -0,0 +1,22 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# ignore all jupyter notebooks
# *.ipynb
# Jupyter Notebook
.ipynb_checkpoints

# select scripts
.chroma/
chromadb_bul_details/
chromadb_food_guide/


.env

optimizer.pt
# pytorch_model.bin

*.bin
Untitled.ipynb
ADDED
@@ -0,0 +1,502 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 11,
"id": "c031ebee-371a-4ffd-bb4b-8ee273a5dfc2",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"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\n"
]
}
],
"source": [
"import os\n",
"import openai\n",
"from dotenv import load_dotenv\n",
"load_dotenv()\n",
"\n",
"print(os.getenv(\"OPENAI_API_KEY\"))\n",
"openai.api_key = os.getenv(\"OPENAI_API_KEY\")\n",
"\n",
"from langchain import PromptTemplate\n",
"from langchain.chains import RetrievalQA\n",
"\n",
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.text_splitter import CharacterTextSplitter\n",
"from langchain.text_splitter import MarkdownTextSplitter\n",
"from langchain.vectorstores import Chroma\n",
"from langchain.document_loaders import TextLoader\n",
"\n",
"from langchain.agents import Tool\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.chat_models import ChatOpenAI\n",
"from langchain.utilities import SerpAPIWrapper\n",
"from langchain.agents import initialize_agent\n",
"from langchain.agents import AgentType\n",
"from langchain.vectorstores import FAISS\n",
"\n",
"from langchain.chains.router import MultiRetrievalQAChain\n",
"from langchain.llms import OpenAI\n",
"\n",
"REST_PERSIST_DIRECTORY = \"chromadb_bul_details\"\n",
"FOOD_GUIDE_PERSIST_DIRECTORY = \"chromadb_food_guide\""
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "e5f08445-3da2-4f1d-8b6f-624200cc77c2",
"metadata": {},
"outputs": [],
"source": [
"embeddings = OpenAIEmbeddings()"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "0db17fe4-0fe8-47f3-9626-8f31039859cd",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"No embedding_function provided, using default embedding function: DefaultEmbeddingFunction https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\n"
]
}
],
"source": [
"bulevar_restaurant_texts = [\n",
" \"Bulevar is open Sunday through Wednesday from 5-9pm, and Thursday through Saturday from 4-10pm. It is open for lunch on Friday from 11-3pm\",\n",
" \"Bulevar is located in the Arboretum at 360 and Mopac, next to Eddie V's\",\n",
" \"Bulevar is an excellent Mexican Cuisine restaurant with a laid back style to fine-dining.\",\n",
" \"Bulevar is another restaurant created by Guy and Larry. With the success of their ATX Cocina, Bulevar has created another unique dining experience with high quality dishes.\"\n",
"]\n",
"bulevar_details_retriever = Chroma.from_texts(bulevar_restaurant_texts, persist_directory=REST_PERSIST_DIRECTORY, embedding_function=embeddings)\n",
"bulevar_details_retriever.persist()"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "ebf95554-2bf8-4665-a93a-9e033af54a64",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"loader = TextLoader('raw_text/food_guide.md')\n",
"documents = loader.load()"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "55add9c5-cef1-4106-a9f8-560228d2c5ea",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"text_splitter = MarkdownTextSplitter(chunk_size=1000, chunk_overlap=0)\n",
"docs = text_splitter.split_documents(documents)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "fffe2d0b-d1c9-453f-99b1-bad54b6c5614",
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"Document(page_content='## BONE-IN RIBEYE\\n\\n**<span style=\"text-decoration:underline;\">Chef Description:</span>**\\n\\nThe ribeye is carved from the primal section called the beef rib. It falls between the chuck (shoulder) and the loin, and spans from ribs six through twelve. This section of the animal naturally collects more intramuscular fat, creating the beautiful white lines of fat. Our ribeye is coming from Linz Meats out of Chicago, this cut is wet aged and hand cut, butter basted and rested. Served over a mixed animal roasted jus and with cebollitas, blistered shishitos and chipotle garlic butter. Cook time is roughly 45 minutes.\\n\\n**<span style=\"text-decoration:underline;\">Table Talk:</span>**\\n\\nOur prime wet aged ribeye is cooked over post oak wood and butter basted, served with cebollitas, shishitos & chipotle garlic butter\\n\\n**<span style=\"text-decoration:underline;\">Allergies:</span>**\\n\\nDairy,Garlic, Onion, Chilies', metadata={'source': 'raw_text/food_guide.md'})"
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"docs[23]"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "c7d314fa-d1ac-462c-ac29-1a2a2b8a53af",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"No embedding_function provided, using default embedding function: DefaultEmbeddingFunction https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\n"
]
}
],
"source": [
"docs_retriever = Chroma.from_documents(docs, persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY, embedding_function=embeddings)\n",
"docs_retriever.persist()"
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "a9e35123-7cfb-4890-bbe5-e1648aaf1cca",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# docs_retriever = None\n",
"# bulevar_details_retriever = None"
]
},
{
"cell_type": "code",
"execution_count": 28,
"id": "b632b749-9e76-461a-87a8-036159d075cc",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# docs_retriever = Chroma(persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY, embedding_function=embeddings)\n",
"# bulevar_details_retriever = Chroma(persist_directory=REST_PERSIST_DIRECTORY, embedding_function=OpenAIEmbeddings())"
]
},
{
"cell_type": "code",
"execution_count": 29,
"id": "a22fe6e4-434f-47aa-92bf-02d088db2d1f",
"metadata": {
"tags": []
},
"outputs": [
{
"ename": "AttributeError",
"evalue": "module 'chromadb.errors' has no attribute 'NotEnoughElementsException'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mInvalidDimensionException\u001b[0m Traceback (most recent call last)",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/langchain/vectorstores/chroma.py:121\u001b[0m, in \u001b[0;36mChroma.__query_collection\u001b[0;34m(self, query_texts, query_embeddings, n_results, where, **kwargs)\u001b[0m\n\u001b[1;32m 120\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 121\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_collection\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 122\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_texts\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery_texts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 123\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_embeddings\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery_embeddings\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 124\u001b[0m \u001b[43m \u001b[49m\u001b[43mn_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mi\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 125\u001b[0m \u001b[43m \u001b[49m\u001b[43mwhere\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwhere\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 126\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 127\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 128\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m chromadb\u001b[38;5;241m.\u001b[39merrors\u001b[38;5;241m.\u001b[39mNotEnoughElementsException:\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/chromadb/api/models/Collection.py:230\u001b[0m, in \u001b[0;36mCollection.query\u001b[0;34m(self, query_embeddings, query_texts, n_results, where, where_document, include)\u001b[0m\n\u001b[1;32m 228\u001b[0m where_document \u001b[38;5;241m=\u001b[39m {}\n\u001b[0;32m--> 230\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_query\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 231\u001b[0m \u001b[43m \u001b[49m\u001b[43mcollection_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mid\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 232\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_embeddings\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery_embeddings\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 233\u001b[0m \u001b[43m \u001b[49m\u001b[43mn_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mn_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 234\u001b[0m \u001b[43m \u001b[49m\u001b[43mwhere\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwhere\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 235\u001b[0m \u001b[43m \u001b[49m\u001b[43mwhere_document\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwhere_document\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 236\u001b[0m \u001b[43m \u001b[49m\u001b[43minclude\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minclude\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 237\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/chromadb/api/local.py:439\u001b[0m, in \u001b[0;36mLocalAPI._query\u001b[0;34m(self, collection_id, query_embeddings, n_results, where, where_document, include)\u001b[0m\n\u001b[1;32m 430\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_query\u001b[39m(\n\u001b[1;32m 431\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 432\u001b[0m collection_id: UUID,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 437\u001b[0m include: Include \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdocuments\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmetadatas\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdistances\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 438\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m QueryResult:\n\u001b[0;32m--> 439\u001b[0m uuids, distances \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_db\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_nearest_neighbors\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 440\u001b[0m \u001b[43m \u001b[49m\u001b[43mcollection_uuid\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcollection_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 441\u001b[0m \u001b[43m \u001b[49m\u001b[43mwhere\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwhere\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 442\u001b[0m \u001b[43m \u001b[49m\u001b[43mwhere_document\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mwhere_document\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 443\u001b[0m \u001b[43m \u001b[49m\u001b[43membeddings\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery_embeddings\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 444\u001b[0m \u001b[43m \u001b[49m\u001b[43mn_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mn_results\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 445\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 447\u001b[0m include_embeddings \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124membeddings\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m include\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/chromadb/db/clickhouse.py:591\u001b[0m, in \u001b[0;36mClickhouse.get_nearest_neighbors\u001b[0;34m(self, collection_uuid, where, where_document, embeddings, n_results)\u001b[0m\n\u001b[1;32m 590\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_index(collection_uuid)\n\u001b[0;32m--> 591\u001b[0m uuids, distances \u001b[38;5;241m=\u001b[39m \u001b[43mindex\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_nearest_neighbors\u001b[49m\u001b[43m(\u001b[49m\u001b[43membeddings\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mn_results\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 593\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m uuids, distances\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/chromadb/db/index/hnswlib.py:272\u001b[0m, in \u001b[0;36mHnswlib.get_nearest_neighbors\u001b[0;34m(self, query, k, ids)\u001b[0m\n\u001b[1;32m 271\u001b[0m \u001b[38;5;66;03m# Check dimensionality\u001b[39;00m\n\u001b[0;32m--> 272\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_check_dimensionality\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 274\u001b[0m \u001b[38;5;66;03m# Check Number of requested results\u001b[39;00m\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/chromadb/db/index/hnswlib.py:130\u001b[0m, in \u001b[0;36mHnswlib._check_dimensionality\u001b[0;34m(self, data)\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dim \u001b[38;5;241m!=\u001b[39m idx_dim:\n\u001b[0;32m--> 130\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m InvalidDimensionException(\n\u001b[1;32m 131\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDimensionality of (\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdim\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m) does not match index dimensionality (\u001b[39m\u001b[38;5;132;01m{\u001b[39;00midx_dim\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m)\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 132\u001b[0m )\n",
"\u001b[0;31mInvalidDimensionException\u001b[0m: Dimensionality of (1536) does not match index dimensionality (384)",
"\nDuring handling of the above exception, another exception occurred:\n",
"\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[29], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m query \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWhat is the zucchini?\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m----> 2\u001b[0m docs \u001b[38;5;241m=\u001b[39m \u001b[43mdocs_retriever\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msimilarity_search\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28mprint\u001b[39m(docs)\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/langchain/vectorstores/chroma.py:182\u001b[0m, in \u001b[0;36mChroma.similarity_search\u001b[0;34m(self, query, k, filter, **kwargs)\u001b[0m\n\u001b[1;32m 165\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21msimilarity_search\u001b[39m(\n\u001b[1;32m 166\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 167\u001b[0m query: \u001b[38;5;28mstr\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 170\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[1;32m 171\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m List[Document]:\n\u001b[1;32m 172\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Run similarity search with Chroma.\u001b[39;00m\n\u001b[1;32m 173\u001b[0m \n\u001b[1;32m 174\u001b[0m \u001b[38;5;124;03m Args:\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[38;5;124;03m List[Document]: List of documents most similar to the query text.\u001b[39;00m\n\u001b[1;32m 181\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 182\u001b[0m docs_and_scores \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msimilarity_search_with_score\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mfilter\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mfilter\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 183\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m [doc \u001b[38;5;28;01mfor\u001b[39;00m doc, _ \u001b[38;5;129;01min\u001b[39;00m docs_and_scores]\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/langchain/vectorstores/chroma.py:229\u001b[0m, in \u001b[0;36mChroma.similarity_search_with_score\u001b[0;34m(self, query, k, filter, **kwargs)\u001b[0m\n\u001b[1;32m 227\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 228\u001b[0m query_embedding \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_embedding_function\u001b[38;5;241m.\u001b[39membed_query(query)\n\u001b[0;32m--> 229\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__query_collection\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 230\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery_embeddings\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[43mquery_embedding\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mn_results\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mwhere\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mfilter\u001b[39;49m\n\u001b[1;32m 231\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 233\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m _results_to_docs_and_scores(results)\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/langchain/utils.py:52\u001b[0m, in \u001b[0;36mxor_args.<locals>.decorator.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 46\u001b[0m invalid_group_names \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(arg_groups[i]) \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m invalid_groups]\n\u001b[1;32m 47\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 48\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExactly one argument in each of the following\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 49\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m groups must be defined:\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 50\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(invalid_group_names)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 51\u001b[0m )\n\u001b[0;32m---> 52\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/miniconda/envs/mlops/lib/python3.10/site-packages/langchain/vectorstores/chroma.py:128\u001b[0m, in \u001b[0;36mChroma.__query_collection\u001b[0;34m(self, query_texts, query_embeddings, n_results, where, **kwargs)\u001b[0m\n\u001b[1;32m 120\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 121\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_collection\u001b[38;5;241m.\u001b[39mquery(\n\u001b[1;32m 122\u001b[0m query_texts\u001b[38;5;241m=\u001b[39mquery_texts,\n\u001b[1;32m 123\u001b[0m query_embeddings\u001b[38;5;241m=\u001b[39mquery_embeddings,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 126\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 127\u001b[0m )\n\u001b[0;32m--> 128\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[43mchromadb\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43merrors\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mNotEnoughElementsException\u001b[49m:\n\u001b[1;32m 129\u001b[0m logger\u001b[38;5;241m.\u001b[39merror(\n\u001b[1;32m 130\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mChroma collection \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_collection\u001b[38;5;241m.\u001b[39mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 131\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontains fewer than \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mi\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m elements.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 132\u001b[0m )\n\u001b[1;32m 133\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m chromadb\u001b[38;5;241m.\u001b[39merrors\u001b[38;5;241m.\u001b[39mNotEnoughElementsException(\n\u001b[1;32m 134\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNo documents found for Chroma collection \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_collection\u001b[38;5;241m.\u001b[39mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 135\u001b[0m )\n",
"\u001b[0;31mAttributeError\u001b[0m: module 'chromadb.errors' has no attribute 'NotEnoughElementsException'"
]
}
],
"source": [
"query = \"What is the zucchini?\"\n",
"docs = docs_retriever.similarity_search(query)\n",
"\n",
"print(docs)"
]
},
{
"cell_type": "code",
"execution_count": 19,
"id": "96cd0d8a-d0c5-4828-81a3-5e908d3d7c9d",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"prompt = \"\"\"Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n",
"\n",
"Context: {context}\n",
"\n",
"Question: {question}\n",
"Helpful Answer:\"\"\"\n",
"\n",
"prompt_template = PromptTemplate(\n",
" template=prompt, input_variables=[\"context\", \"question\"]\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "e3ee9598-8ea6-45cc-8a27-6b3bb8f22c11",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"retriever_infos = [\n",
" {\n",
" \"name\": \"Food Guide\", \n",
" \"description\": \"Good for answering questions about the menu\", \n",
" \"retriever\": docs_retriever.as_retriever()\n",
" },\n",
" {\n",
" \"name\": \"Bulevar Restaurant Details\", \n",
" \"description\": \"Good for answering questions about Bulevar's hours, and restaurant details such as its mission, history, and owners.\", \n",
" \"retriever\": bulevar_details_retriever.as_retriever()\n",
" }\n",
"]\n",
"\n",
"chain = MultiRetrievalQAChain.from_retrievers(OpenAI(), retriever_infos, verbose=True)"
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "3d7393ea-3464-4ef3-bfe4-3ca93546791e",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Bulevar Restaurant Details: {'query': 'What is Bulevar?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" Bulevar is an excellent Mexican Cuisine restaurant with a laid back style to fine-dining created by Guy and Larry.\n"
]
}
],
"source": [
"print(chain.run(\"What is Bulevar?\"))"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "774e945a-db65-4635-bb98-3d921fee912e",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': \"What's special about Niman Ranch steaks?\"}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" Niman Ranch steaks are raised humanely and sustainably, and are dry aged for a minimum of 50 days, which concentrates the beef flavor.\n"
]
}
],
"source": [
"print(chain.run(\"What's special about Niman Ranch steaks?\"))"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "4a934553-2904-4fdd-a632-96a9cad34a58",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': 'Do you recommend dishes from Bulevar Restaurant?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" We cannot recommend dishes from Bulevar Restaurant, as we are not familiar with their menu.\n"
]
}
],
"source": [
"print(chain.run(\"Do you recommend it?\"))"
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "b1563ba0-d049-40b8-a917-f4b8c535be7e",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': \"What's on the crab tostada?\"}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" The crab tostada is fried Masienda tortilla and seasoned with POW POW, topped with sliced avocado and a remoulade. It is then topped with jumbo lump crab meat lightly dressed and topped with radishes, freshly chopped chives, cilantro and Tajin powder, and a light drizzle of olive oil.\n"
]
}
],
"source": [
"print(chain.run(\"What's on the crab tostada?\"))"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "d7c1e7c2-e534-4bbe-a6ce-53716c337864",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Bulevar Restaurant Details: {'query': 'When does the restaurant open?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" Bulevar is open Sunday through Wednesday from 5-9pm, and Thursday through Saturday from 4-10pm. It is open for lunch on Friday from 11-3pm.\n"
]
}
],
"source": [
"print(chain.run(\"When does the restaurant open?\"))"
]
},
{
"cell_type": "code",
"execution_count": 99,
"id": "10a9a109-3c63-4824-bb5f-c5aafe92e7dd",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': 'What allergies are in the Churros?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" Nuts(flour), Dairy, Cinnamon\n"
]
}
],
"source": [
"print(chain.run(\"What allergies are in the Churros?\"))"
]
},
{
"cell_type": "code",
"execution_count": 100,
"id": "5b7d843b-a95f-4794-ae90-b83073b30fae",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': 'What are the Churros?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" The Churros are traditional mini churros deep fried and tossed with a cinnamon/sugar blend, served with house made dulce de leche and chocolate ganache dipping sauces, and topped with powdered sugar. They contain nuts (flour), dairy, and cinnamon.\n"
]
}
],
"source": [
"print(chain.run(\"What are the Churros?\"))"
]
},
{
"cell_type": "code",
"execution_count": 101,
"id": "49ff4757-463f-4616-ad1e-0693a16e72bd",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"\n",
"\u001b[1m> Entering new MultiRetrievalQAChain chain...\u001b[0m\n",
"Food Guide: {'query': 'What allergies are in the tarta de limon?'}\n",
"\u001b[1m> Finished chain.\u001b[0m\n",
" Dairy, Egg, Honey, Citrus\n"
]
}
],
"source": [
"print(chain.run(\"What allergies are on the tarta de limon?\"))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "59d54fdb-11da-437b-b513-a93d056ee713",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.10 (mlops)",
"language": "python",
"name": "mlops"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.8"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
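Note on the error recorded in the last executed cell above: the `InvalidDimensionException` ("Dimensionality of (1536) does not match index dimensionality (384)") means the query uses OpenAI embeddings (1536 dims) while the persisted index was built with chromadb's default MiniLM model (384 dims). The likely cause is that `Chroma.from_texts` / `Chroma.from_documents` are called with an `embedding_function=` keyword; in the langchain 0.0.x API this code targets, those classmethods take the embeddings via the `embedding` argument (only the `Chroma` constructor takes `embedding_function=`), so the keyword appears to be silently ignored and the collection falls back to the default embedding function, as the "No embedding_function provided" warnings suggest. A minimal sketch of building and reloading a collection with matching embeddings, reusing the directory and file names from this commit:

```python
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.document_loaders import TextLoader
from langchain.text_splitter import MarkdownTextSplitter
from langchain.vectorstores import Chroma

FOOD_GUIDE_PERSIST_DIRECTORY = "chromadb_food_guide"
embeddings = OpenAIEmbeddings()

docs = MarkdownTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(
    TextLoader("raw_text/food_guide.md").load()
)

# Build the index with OpenAI vectors (1536 dims): pass `embedding=`, not `embedding_function=`.
docs_retriever = Chroma.from_documents(
    docs, embedding=embeddings, persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY
)
docs_retriever.persist()

# Reload it later with the same embedding model; the constructor is the API that
# takes `embedding_function=`.
docs_retriever = Chroma(
    persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY, embedding_function=embeddings
)
print(docs_retriever.similarity_search("What is the zucchini?")[0].page_content)
```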
app.py
ADDED
@@ -0,0 +1,157 @@
# gradio imports
import gradio as gr
import os
import time

# Imports
import os

import openai
from langchain.chains import ConversationalRetrievalChain

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.document_loaders import TextLoader
from langchain.text_splitter import MarkdownTextSplitter
# from langchain.chat_models import ChatOpenAI
# from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
# from langchain.document_loaders import TextLoader

# from langchain.memory import ConversationBufferMemory
# from langchain.chat_models import ChatOpenAI
from langchain.chains.router import MultiRetrievalQAChain
from langchain.llms import OpenAI

css = """
#col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
"""

title = """
<div style="text-align: center;max-width: 700px;">
<h1>Chat about Bulevar's Menu</h1>
</div>
"""

prompt_hints = """
<div style="text-align: center;max-width: 700px;">
<p style="text-align: left;">What is in the crab tostada?<br /></p>
</div>
"""

# from index import PERSIST_DIRECTORY, CalendarIndex
REST_PERSIST_DIRECTORY = "chromadb_bul_details"
FOOD_GUIDE_PERSIST_DIRECTORY = "chromadb_food_guide"
# Create embeddings

# # create memory object
# from langchain.memory import ConversationBufferMemory
# memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

def loading_pdf():
    return "Loading..."

def loading_database(open_ai_key):
    if open_ai_key is not None:
        os.environ['OPENAI_API_KEY'] = open_ai_key
        openai.api_key = open_ai_key

        embeddings = OpenAIEmbeddings(openai_api_key=open_ai_key)
        # adds these restaurant details sentences
        bulevar_restaurant_texts = [
            "Bulevar is open Sunday through Wednesday from 5-9pm, and Thursday through Saturday from 4-10pm. It is open for lunch on Friday from 11-3pm",
            "Bulevar is located in the Arboretum at 360 and Mopac, next to Eddie V's",
            "Bulevar offers tasty Mexican Cuisine with a laid back style to fine-dining.",
            "Bulevar is another restaurant created by Guy and Larry. With the success of their ATX Cocina, Bulevar has created another unique dining experience with high quality dishes."
        ]
        bulevar_details_retriever = Chroma.from_texts(bulevar_restaurant_texts, embedding=embeddings, persist_directory=REST_PERSIST_DIRECTORY)
        if not os.path.exists(REST_PERSIST_DIRECTORY):
            save_dir(bulevar_details_retriever)
        loader = TextLoader('raw_text/food_guide.md')
        documents = loader.load()

        # adds the food_guide database
        text_splitter = MarkdownTextSplitter(chunk_size=1000, chunk_overlap=0)
        docs = text_splitter.split_documents(documents)

        docs_retriever = Chroma.from_documents(docs, embedding=embeddings, persist_directory=FOOD_GUIDE_PERSIST_DIRECTORY)

        if not os.path.exists(FOOD_GUIDE_PERSIST_DIRECTORY):
            save_dir(docs_retriever)
        retriever_infos = [
            {
                "name": "Food Guide",
                "description": "Good for answering questions about the menu",
                "retriever": docs_retriever.as_retriever()
            },
            {
                "name": "Bulevar Restaurant Details",
                "description": "Good for answering questions about Bulevar's hours, and restaurant details such as its mission, history, and owners.",
                "retriever": bulevar_details_retriever.as_retriever()
            }
        ]
        global chain
        chain = MultiRetrievalQAChain.from_retrievers(OpenAI(temperature=0, openai_api_key=open_ai_key), retriever_infos, verbose=True)
        return "Ready"
    else:
        return "You forgot OpenAI API key"

def save_dir(vectorstore_retriever):
    vectorstore_retriever.persist()

def add_text(history, text):
    history = history + [(text, None)]
    return history, ""


def bot(history):
    response = infer(history[-1][0], history)
    history[-1][1] = ""
    for character in response:
        history[-1][1] += character
        time.sleep(0.05)
        yield history


def infer(question, history):
    # print("Here")
    # print(question)
    # print(history)
    # print("DISPLAYED!!!")
    res = []
    # for human, ai in history[:-1]:
    #     pair = (human, ai)
    #     res.append(pair)
    # print("now ask something new")
    chat_history = res
    query = question
    result = chain({"input": query})
    return result["result"]

def update_message(question_component, chat_prompts):
    question_component.value = chat_prompts.get_name()
    return None

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        with gr.Column():
            with gr.Row():
                openai_key = gr.Textbox(label="OpenAI API key", type="password")
                submit_api_key = gr.Button("Submit")
            with gr.Row():
                langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)

        chatbot = gr.Chatbot([], elem_id="chatbot").style(height=350)
        question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ")
        submit_btn = gr.Button("Send Message")
        gr.HTML(prompt_hints)

    submit_api_key.click(loading_database, inputs=[openai_key], outputs=[langchain_status], queue=False)
    # demo.load(loading_database, None, langchain_status)
    question.submit(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot
    )
    submit_btn.click(add_text, [chatbot, question], [chatbot, question]).then(
        bot, chatbot, chatbot)

demo.queue(concurrency_count=2, max_size=20).launch()
build_database.py
ADDED
@@ -0,0 +1,103 @@
import os
import openai
from dotenv import load_dotenv
load_dotenv()

print(os.getenv("OPENAI_API_KEY"))
openai.api_key = os.getenv("OPENAI_API_KEY")

from langchain import PromptTemplate
from langchain.chains import RetrievalQA

from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.text_splitter import CharacterTextSplitter
from langchain.text_splitter import MarkdownTextSplitter
from langchain.vectorstores import Chroma
from langchain.document_loaders import TextLoader

from langchain.agents import Tool
from langchain.memory import ConversationBufferMemory
from langchain.chat_models import ChatOpenAI
from langchain.utilities import SerpAPIWrapper
from langchain.agents import initialize_agent
from langchain.agents import AgentType
# from langchain.vectorstores import FAISS

from langchain.chains.router import MultiRetrievalQAChain
from langchain.llms import OpenAI

bulevar_restaurant_texts = [
    "Bulevar is open Sunday through Wednesday from 5-9pm, and Thursday through Saturday from 4-10pm",
    "Bulevar is located in the Arboretum at 360 and Mopac, next to Eddie V's",
    "Bulevar offers tasty Mexican Cuisine, with a laid back style to fine-dining.",
]
bulevar_details_retriever = Chroma.from_texts(bulevar_restaurant_texts, embedding=OpenAIEmbeddings())

loader = TextLoader('raw_text/food_guide.md')
documents = loader.load()

text_splitter = MarkdownTextSplitter(chunk_size=750, chunk_overlap=35)
docs = text_splitter.split_documents(documents)

embeddings = OpenAIEmbeddings()

docs_retriever = Chroma.from_documents(docs, embeddings)

query = "Who is Jonny?"
docs = docs_retriever.similarity_search(query)

print(docs[0].page_content)

prompt = """Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.

Context: {context}

Question: {question}
Helpful Answer:"""

prompt_template = PromptTemplate(
    template=prompt, input_variables=["context", "question"]
)
qa_chain = RetrievalQA.from_chain_type(
    llm=ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.0),
    chain_type='stuff',
    retriever=docs_retriever.as_retriever(),  # the doc search
    # k=4, # takes the top 4 documents from the search
    return_source_documents=True,
    chain_type_kwargs={"prompt": prompt_template, "verbose": True},
)
result = qa_chain({"query": query})
print(result["result"])

retriever_infos = [
    {
        "name": "Food Guide",
        "description": "Good for answering questions about Bulevar's dinner menu",
        "retriever": docs_retriever.as_retriever()
    },
    {
        "name": "Bulevar Restaurant Details",
        "description": "Good for answering questions about Bulevar's hours, and restaurant details such as its mission, history, and owners.",
        "retriever": bulevar_details_retriever.as_retriever()
    }
]

chain = MultiRetrievalQAChain.from_retrievers(OpenAI(), retriever_infos, verbose=True)

print(chain.run("What is Bulevar?"))
print(chain.run("What kinds of meats are offered?"))


# print(result["source_documents"])

# question answer

# How is Dialogues different from others?
# Dialogues is different from others because it is a well-thought-out game with several players, built-in trust, a simple interface, and a feature that allows the user to select and align with the values that matter to them. It is designed to offer value to the individual and is not dictated by Silicon Valley. The founder has put everything they like about reading books, watching movies, and having conversations into building the app. The app also uses a measurement of normality based on duration of time to respond to abnormal and normal signals in the data.

# How is the product coming along?
# The product is nearly half a million lines of code and has an incredibly well-developed and defined product idea, but it is not yet ready for business as the creator is still seeking funding partners and wants to perfect more of it before opening up.

# Who is Jonny?
# Jonny is a person who has created a product related to AI, specifically LLMs and chatbot games, and is seeking investment from potential investors. He believes that his product can increase the valuation of existing companies and is confident in his engineering abilities. He also believes that the future of LLMs will be with writers and is focused on crafting writing to service each person at a personal level.
requirements.txt
ADDED
@@ -0,0 +1,4 @@
openai
tiktoken
chromadb
langchain
steps.md
ADDED
@@ -0,0 +1,14 @@
# Steps

1. Pre-process the knowledge base (docs)
2. Store it as embeddings in a vector database
3. Inject the retrieved content into the GPT-3 prompt (see the sketch below)

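A minimal sketch of these three steps, assuming the same LangChain calls that `build_database.py` and `app.py` in this commit use (the file path, chunk size, model name, and question are illustrative):

```python
# Sketch of the three steps above; paths and parameters are examples, not fixed choices.
from langchain.document_loaders import TextLoader
from langchain.text_splitter import MarkdownTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA

# 1. Pre-process the knowledge base: load the markdown and split it into chunks
documents = TextLoader("raw_text/food_guide.md").load()
docs = MarkdownTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(documents)

# 2. Store it as embeddings in a vector database (Chroma locally; Pinecone is the hosted option)
store = Chroma.from_documents(docs, OpenAIEmbeddings())

# 3. Inject the retrieved chunks into the GPT-3 prompt via a retrieval QA chain
qa = RetrievalQA.from_chain_type(
    llm=ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.0),
    chain_type="stuff",  # "stuff" pastes the retrieved chunks directly into the prompt
    retriever=store.as_retriever(),
)
print(qa.run("What's on the crab tostada?"))
```
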
# App building specs

1. langchain - for chaining prompts
2. langflow - visual UI for prototyping the prompt/database flow
3. gradio - integrates with Hugging Face (important for easily demoing the work and letting users supply their own OpenAI key to pay for the results)
4. pinecone - cloud vector database, as an alternative to chromadb, which runs locally