from langchain.chains import RetrievalQA
from langchain_ollama import ChatOllama


def Q_A(vectorstore, question):
    """Answer a question against the given vector store using a local Ollama LLM."""
    # Local Llama 3.1 model served by Ollama; moderate temperature for balanced answers.
    ollama_llm = ChatOllama(
        model="llama3.1",
        temperature=0.5,
    )
    # "stuff" chain type: retrieved documents are inserted directly into the prompt.
    qa = RetrievalQA.from_chain_type(
        llm=ollama_llm,
        chain_type="stuff",
        retriever=vectorstore.as_retriever(),
    )
    # RetrievalQA.invoke returns a dict; the generated answer is stored under "result".
    answer = qa.invoke(question)
    return answer["result"]
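

# --- Usage sketch (illustrative, not part of the original module) ---
# Assumes a vector store has already been built elsewhere in the project
# (e.g. a Chroma or FAISS index populated with embedded documents); the
# helper name `build_vectorstore` and the sample question are hypothetical.
#
# vectorstore = build_vectorstore("docs/")  # hypothetical helper
# print(Q_A(vectorstore, "What does the report conclude?"))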