from tasks.base_task import BaseTask
from utils.llama_index_utils import setup_directories
from llama_index.core import StorageContext, load_index_from_storage, ChatPromptTemplate

class QueryHandlingTask(BaseTask):
    """Task that answers a user query against a persisted LlamaIndex index.

    Lifecycle (driven by the BaseTask pipeline):
      1. ``load_input``  — capture the query and resolve data/persist dirs.
      2. ``process``     — rehydrate the index and build a query engine with
                           a custom Q&A prompt.
      3. ``save_output`` — execute the query and extract the answer text.
    """

    def load_input(self, input_data):
        """Store the incoming query and resolve the working directories.

        ``input_data`` is expected to carry a 'query' key; a missing key
        raises KeyError, matching the upstream contract.
        """
        self.query = input_data['query']
        self.data_dir, self.persist_dir = setup_directories()

    def process(self):
        """Rebuild the index from disk and prepare the query engine."""
        # Rehydrate the previously persisted index from storage.
        ctx = StorageContext.from_defaults(persist_dir=self.persist_dir)
        self.index = load_index_from_storage(ctx)

        # Single-turn user prompt; {context_str}/{query_str} are filled in
        # by the query engine at call time.
        prompt_messages = [
            (
                "user",
                """You are a Q&A assistant. Your main goal is to provide answers as accurately as possible, based on the instructions and context you have been given. If a question does not match the provided context or is outside the scope of the document, kindly advise the user to ask questions within the context of the document. Provide the answers in Spanish and cite the page and section where the answers were found.
                Context:
                {context_str}
                Question:
                {query_str}
                """
            )
        ]
        self.text_qa_template = ChatPromptTemplate.from_messages(prompt_messages)
        self.query_engine = self.index.as_query_engine(text_qa_template=self.text_qa_template)

    def save_output(self, result):
        """Run the stored query and return the answer text.

        NOTE(review): despite the name, this method performs the query —
        that split appears to be imposed by the BaseTask pipeline; confirm
        against BaseTask before reshaping.
        """
        answer = self.query_engine.query(self.query)

        # Response objects expose `.response`; use a sentinel so an
        # explicit `response=None` attribute is still honored as-is.
        _missing = object()
        text = getattr(answer, 'response', _missing)
        if text is not _missing:
            return text
        if isinstance(answer, dict) and 'response' in answer:
            return answer['response']
        # Fallback: apologize (in Spanish) when no answer could be extracted.
        return "Disculpa no pude encontrar una respuesta."