from flask import Flask, request, jsonify
from langchain_community.vectorstores import Chroma
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_ollama import OllamaLLM
from langchain.chains.question_answering import load_qa_chain
from langchain_core.prompts import PromptTemplate
app = Flask(__name__)

# Initialize the language model
llm = OllamaLLM(model="llama3.2")
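# NOTE (assumption): OllamaLLM talks to a running Ollama server (http://localhost:11434
# by default), so the "llama3.2" model must already be pulled and the server started.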
# Initialize HuggingFaceEmbeddings and Chroma | |
model_name = "intfloat/multilingual-e5-large" | |
load_from_dir = "Hadith_Chroma_db" | |
embedding_llm = HuggingFaceEmbeddings(model_name=model_name) | |
loaded_vector_db = Chroma( | |
persist_directory=load_from_dir, | |
embedding_function=embedding_llm | |
) | |
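# NOTE (assumption): the Chroma store is only loaded here, not built; "Hadith_Chroma_db"
# is expected to have been created beforehand with the same embedding model, otherwise
# similarity_search will return poor or empty results.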
def get_similar_docs(query):
    """Retrieve similar documents based on the query."""
    similar_docs = loaded_vector_db.similarity_search(query, k=2)
    return similar_docs
def ask_llms(query_text):
    """Ask the LLM to provide an answer based on similar documents."""
    similar_docs = get_similar_docs(query_text)
    qna_template = '\n'.join([
        "Answer the following question using the context provided.",
        "If the answer is not included in the context, say 'No answer available'.",
        "### Context:",
        "{context}",
        "### Question:",
        "{question}",
        "### Answer:"
    ])
    qna_prompt = PromptTemplate(
        template=qna_template,
        input_variables=['context', 'question']
    )
    # "stuff" chain: all retrieved documents are inserted into the {context}
    # slot of a single prompt and sent to the LLM in one call.
    stuff_chain = load_qa_chain(llm, chain_type="stuff", prompt=qna_prompt, verbose=True)
    final_answer = stuff_chain.invoke({
        "input_documents": similar_docs,
        "question": query_text
    })
    return final_answer['output_text']
@app.route("/ai", methods=["POST"])
def aiPost():
    """Handle POST requests to the /ai endpoint."""
    try:
        json_content = request.json
        if not json_content or 'query' not in json_content:
            return jsonify({"error": "Invalid input, 'query' field is required"}), 400
        query = json_content.get('query')
        # Get the response from the LLM based on the query
        response = ask_llms(query)
        return jsonify({"response": response})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
def start_app():
    """Start the Flask app."""
    app.run(host="0.0.0.0", port=8080, debug=True)

if __name__ == '__main__':
    start_app()
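# Example client call (hypothetical query; any JSON body with a "query" field works):
#   curl -X POST http://localhost:8080/ai \
#        -H "Content-Type: application/json" \
#        -d '{"query": "..."}'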