File size: 3,132 Bytes
0fadcb9
d015f0c
0fadcb9
062179e
d015f0c
062179e
 
0fadcb9
8d06b39
7b864ba
0fadcb9
 
7b864ba
0fadcb9
 
 
 
 
7b864ba
 
8d06b39
0fadcb9
d015f0c
0fadcb9
 
062179e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0fadcb9
 
 
 
062179e
0fadcb9
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from pathlib import Path

from prompts.prompts_manager import PromptsManager
from repository.intel_npu import IntelNpuRepository
from repository.ollama import OllamaRepository
from llm.llm import Model
from repository.repository import ModelRoles
from form.form import work_categories, build_form_data_from_answers, write_pdf_form


def check_for_missing_answers(parsed_questions: dict[int, str]):
    """Return the ids of every question whose parsed answer is still None."""
    missing = []
    for question_id, parsed_answer in parsed_questions.items():
        if parsed_answer is None:
            missing.append(question_id)
    return missing


def ask_again(missing_questions: list[int], user_questions: list[str], parsed_questions: dict[int, str]):
    """Interactively re-ask each unanswered question and store the user's reply.

    Mutates parsed_questions in place: for every id in missing_questions the
    user's typed answer replaces the missing (None) entry.
    """
    for question_id in missing_questions:
        prompt_text = f"I could not find the answer to this question: {user_questions[question_id].lower()}"
        parsed_questions[question_id] = input(prompt_text)


if __name__ == '__main__':
    prompts_manager = PromptsManager()
    user_prompt = input(f"Please describe what you need to do. To get the best results "
                        f"try to answer all the following questions:\n{'\n'.join(prompts_manager.questions)}\n\n>")

    # repository = OllamaRepository(Model("llama3.1",
    #                                     ModelRoles("system", "user", "assistant")),
    #                               prompts_manager.system_prompt,
    #                               )
    repository = IntelNpuRepository("meta-llama/Meta-Llama-3-8B-Instruct")
    repository.init()
    repository.send_prompt(f"Ingest the following information: {user_prompt}")
    answers = {x:None for x in range(0,11)}
    answer = repository.send_prompt(f"Answer the following questions, if the answer is not present just answer null. Put the answers between curly braces, separate each answer with a comma, keep the answer brief  and maintain the order in which the questions are asked. Do not add any preamble: {"\n".join(prompts_manager.verification_prompt)}")
    for idx, a in enumerate(answer['content'].split(",")):
        answers[idx] = None if 'null' in a else a

    # for idx, q in enumerate(prompts_manager.verification_prompt):
    #     answer = repository.send_prompt(
    #         f"Answer the following questions, if the answer is not present just answer null. Keep the answer brief and separate each answer with a comma and maintain the order in which the questions are asked: {q}")
    #     answers[idx] = None if 'null' in answer["content"].lower() else answer['content']
    missing_answers = check_for_missing_answers(answers)
    while missing_answers:
        ask_again(missing_answers, prompts_manager.questions, answers)
        missing_answers = check_for_missing_answers(answers)
    answer = repository.send_prompt(
        f"The work to do is {answers[1]}. Given the following categories {work_categories.values()} which ones are the most relevant? Only return one categories, separated by a semicolon")
    categories = []
    for category in answer["content"].split(";"):
        categories.extend([k for k, v in work_categories.items() if category in v])

    form_data = build_form_data_from_answers(answers, categories, f"{Path(__file__, "..", "signature.png")}")
    write_pdf_form(form_data, Path("signed_form1.pdf"))