Update app.py
app.py CHANGED
@@ -4,10 +4,15 @@ from langchain_groq import ChatGroq
 from langchain.prompts import ChatPromptTemplate
 from langchain.chains import RetrievalQA, ConversationalRetrievalChain
 from langchain.memory import ConversationBufferMemory
+from langchain_huggingface.embeddings import HuggingFaceEmbeddings
+from langchain.vectorstores import Chroma

 def rag_retriever(message, history, system_prompt, num_sources=4, temperature=0):
     chat = ChatGroq(temperature=temperature, model_name="llama3-70b-8192", api_key=os.getenv("GROQ_API_KEY"))

+    embeddings = HuggingFaceEmbeddings(model_name="avsolatorio/GIST-large-Embedding-v0")
+    store = Chroma(persist_directory="chroma.sqlite3", embedding_function=embeddings, collection_name='ai_act')
+
     prompt_template = ChatPromptTemplate.from_messages([
         ("system", system_prompt+"""
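The rest of rag_retriever falls outside this hunk. As a rough sketch only (not part of this commit, and the chain wiring is an assumption based on the imports already present in app.py), the new store could feed the ConversationalRetrievalChain like this, reusing the names chat, store, num_sources, and message defined above:

    # Hypothetical continuation of rag_retriever (assumption, not in this diff):
    # expose the Chroma store as a retriever and let the already-imported
    # ConversationalRetrievalChain combine it with conversation memory.
    retriever = store.as_retriever(search_kwargs={"k": num_sources})
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    chain = ConversationalRetrievalChain.from_llm(llm=chat, retriever=retriever, memory=memory)
    return chain({"question": message})["answer"]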