Updated embeddings

Files changed:
- app.py (+5 -6)
- climateqa/engine/embeddings.py (+1 -0)
app.py
CHANGED
@@ -13,12 +13,6 @@ from azure.storage.fileshare import ShareServiceClient
 import re
 import json
 
-
-# Langchain
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.schema import AIMessage, HumanMessage
-from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
-
 # ClimateQ&A imports
 from climateqa.engine.llm import get_llm
 # from climateqa.chains import load_qa_chain_with_docs,load_qa_chain_with_text
@@ -46,6 +40,8 @@ theme = gr.themes.Base(
     font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
 )
 
+print("1")
+
 
 
 init_prompt = ""
@@ -91,7 +87,10 @@ def parse_output_llm_with_sources(output):
 
 
 # Create embeddings function and LLM
+print("1")
 embeddings_function = get_embeddings_function()
+print("1")
+
 
 # Create vectorstore and retriever
 vectorstore = get_pinecone_vectorstore(embeddings_function)
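For reference, a minimal sketch of how app.py wires these pieces together after this commit. Only get_embeddings_function() and get_pinecone_vectorstore(embeddings_function) appear in the diff above; the import paths and the retriever step below are assumptions added for illustration.

# Sketch only: module paths and the retriever call are assumed, not taken from the diff.
from climateqa.engine.embeddings import get_embeddings_function
from climateqa.engine.vectorstore import get_pinecone_vectorstore  # path assumed

# Create embeddings function (as in the diff above)
embeddings_function = get_embeddings_function()

# Create vectorstore and retriever (the as_retriever call is hypothetical)
vectorstore = get_pinecone_vectorstore(embeddings_function)
retriever = vectorstore.as_retriever(search_kwargs={"k": 4})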
climateqa/engine/embeddings.py
CHANGED
@@ -11,6 +11,7 @@ def get_embeddings_function(version = "v1.2"):
 
     model_name = "BAAI/bge-base-en-v1.5"
     encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity
+    print("Loading embeddings model: ", model_name)
     embeddings_function = HuggingFaceBgeEmbeddings(
         model_name=model_name,
         encode_kwargs=encode_kwargs,
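Putting the hunk in context, a hedged sketch of the updated get_embeddings_function(). The function signature and the body lines come from the hunk above; the import path, the closing of the constructor call, and the return statement are assumptions.

from langchain.embeddings import HuggingFaceBgeEmbeddings  # import path assumed

def get_embeddings_function(version="v1.2"):
    model_name = "BAAI/bge-base-en-v1.5"
    # Normalized embeddings make dot product equal to cosine similarity
    encode_kwargs = {'normalize_embeddings': True}
    print("Loading embeddings model: ", model_name)  # line added in this commit
    embeddings_function = HuggingFaceBgeEmbeddings(
        model_name=model_name,
        encode_kwargs=encode_kwargs,
    )  # any further keyword arguments in the real file are not shown in the hunk
    return embeddings_function  # assumed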