from langchain_ollama import OllamaLLM
import similarity
from langchain.chains.question_answering import load_qa_chain
from langchain_core.prompts import PromptTemplate
# Initialize an instance of the Ollama model
llm = OllamaLLM(model="llama3.2")
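# Note: OllamaLLM talks to a locally running Ollama server, so the llama3.2
# model must already be pulled (e.g. with `ollama pull llama3.2`) before use.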

def ask_llms(query_text):
    """Answer a query using documents retrieved by the similarity module."""
    # Retrieve the documents most similar to the query.
    similar_docs = similarity.get_similar_docs(query_text)

    # Build the question-answering prompt. Empty entries become blank lines
    # that separate the sections of the prompt.
    qna_template = '\n'.join([
        "Answer the following question using the context provided.",
        "If the answer is not included in the context, say:",
        "No answer available",
        "",
        "### Context:",
        "{context}",
        "",
        "### Question:",
        "{question}",
        "",
        "### Answer:",
    ])
    qna_prompt = PromptTemplate(
        template=qna_template,
        input_variables=['context', 'question'],
    )

    # "Stuff" all retrieved documents into a single prompt and query the model.
    stuff_chain = load_qa_chain(llm, chain_type="stuff", prompt=qna_prompt)
    final_answer = stuff_chain.invoke({
        "input_documents": similar_docs,
        "question": query_text
    })
    return final_answer['output_text']
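
# Example usage: a minimal sketch reusing the sample query from this file
# (it asks, in Arabic, about the virtue of the Asr prayer).
if __name__ == "__main__":
    query_text = "ما فضل صلاة العصر؟"
    print(f'Query : {query_text}')
    print(f'Answer: {ask_llms(query_text)}')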