changes in the embedding model
query.py CHANGED
@@ -22,12 +22,11 @@ chat = ChatGroq(
 )
 
 #Embedding Model
-embedding = HuggingFaceInferenceAPIEmbeddings(api_key= hf_token)
-
+embedding = HuggingFaceInferenceAPIEmbeddings(api_key= hf_token, model_name = "BAAI/bge-base-en-v1.5" )
 
 # Connect to Pinecone
 vectorstore = PineconeVectorStore.from_existing_index(
-    index_name= 'courselens',
+    index_name= 'courselens-2',
     embedding= embedding,
 )
 
@@ -65,4 +64,5 @@ def generate_response(text):
     response = rag_chain.invoke(text)
     return response
 
+#Testing the responses
 #print(generate_response("I want to learn tableau. How to learn it?"))
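
For context, here is a minimal, self-contained sketch (not part of this commit) of how the updated embedding model and the new index fit together. BAAI/bge-base-en-v1.5 returns 768-dimensional vectors, which presumably is why a new Pinecone index ('courselens-2') with a matching dimension replaces the original 'courselens'. The environment variable names, the k value, and the retrieval check below are assumptions for illustration, not taken from query.py.

import os

from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings
from langchain_pinecone import PineconeVectorStore

# Assumed environment variables; query.py may load its credentials differently.
hf_token = os.environ["HF_TOKEN"]   # Hugging Face Inference API key
# PineconeVectorStore reads PINECONE_API_KEY from the environment.

# BAAI/bge-base-en-v1.5 produces 768-dimensional embeddings, so the index
# behind 'courselens-2' is expected to have been created with dimension 768.
embedding = HuggingFaceInferenceAPIEmbeddings(
    api_key=hf_token,
    model_name="BAAI/bge-base-en-v1.5",
)
print(len(embedding.embed_query("dimension check")))   # expected: 768

vectorstore = PineconeVectorStore.from_existing_index(
    index_name="courselens-2",
    embedding=embedding,
)

# Quick retrieval sanity check against the new index (k=4 is an arbitrary choice).
retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
docs = retriever.invoke("I want to learn tableau. How to learn it?")
for doc in docs:
    print(doc.metadata)

If the embedding dimension and the index dimension disagree, Pinecone queries fail, so the query-side model and the model used to build the index must stay in sync.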