File size: 2,079 Bytes
0fadcb9
d015f0c
d005419
0fadcb9
d005419
 
 
 
8d06b39
7b864ba
0fadcb9
 
7b864ba
0fadcb9
 
 
d005419
 
7b864ba
 
8d06b39
0fadcb9
d015f0c
0fadcb9
 
d005419
 
 
062179e
d005419
 
 
 
0fadcb9
 
 
 
d005419
 
0fadcb9
 
d005419
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
from pathlib import Path

from llm_manager.llm_parser import LlmParser
from prompts.prompts_manager import PromptsManager

from repository.repository import get_repository
from repository.repository_abc import ModelRoles, Model
from form.form import build_form_data_from_answers, write_pdf_form


def check_for_missing_answers(parsed_questions: dict[int, str]) -> list[int]:
    """Return the ids of questions whose parsed answer is missing.

    :param parsed_questions: mapping of question id -> parsed answer;
        the value is ``None`` when no answer could be extracted.
    :return: list of question ids that still need an answer, in dict order.
    """
    # Iterate items() so each entry is read once instead of doing a keyed
    # lookup per key (same result, clearer intent).
    return [id_ for id_, answer in parsed_questions.items() if answer is None]


def ask_again(missing_questions: list[int], user_questions: list[str], parsed_questions: dict[int, str]) -> None:
    """Prompt the user interactively for each unanswered question.

    For every id in *missing_questions*, shows the corresponding question
    text (lower-cased) and stores the typed reply back into
    *parsed_questions*. Mutates *parsed_questions* in place.
    """
    for question_id in missing_questions:
        prompt = f"I could not find the answer to this question: {user_questions[question_id].lower()}"
        parsed_questions[question_id] = input(prompt)


if __name__ == '__main__':
    # Collect the user's free-form description, showing the full question
    # list up front so the answer covers as many fields as possible.
    prompts_manager = PromptsManager()
    user_prompt = input(f"Please describe what you need to do. To get the best results "
                        f"try to answer all the following questions:\n{'\n'.join(prompts_manager.questions)}\n\n>")

    # Build the LLM backend ("intel_npu" repository) around a Llama-3-8B
    # instruct model; conversation turns are logged to llm_log.txt.
    repository = get_repository("intel_npu", Model("meta-llama/Meta-Llama-3-8B-Instruct",
                                                   ModelRoles("system", "user", "assistant")),
                                prompts_manager.system_prompt, Path("llm_log.txt"))
    repository.init()
    # repository.send_prompt(prompts_manager.ingest_user_answers(user_prompt))
    # Ask the model to map the free-form input onto the question list, then
    # parse its reply into {question id: answer-or-None}.
    answer = repository.send_prompt(prompts_manager.verify_user_input_prompt(user_prompt))
    answers = LlmParser.parse_verification_prompt_answers(answer['content'])

    # Keep prompting the user directly until every question has an answer.
    # NOTE(review): input() never returns None, so one ask_again pass should
    # clear all missing entries and the loop should run at most once.
    missing_answers = check_for_missing_answers(answers)
    while missing_answers:
        ask_again(missing_answers, prompts_manager.questions, answers)
        missing_answers = check_for_missing_answers(answers)
    # Derive the work category from the answer to question 1 — presumably the
    # job-description question; verify against PromptsManager.questions.
    answer = repository.send_prompt(prompts_manager.get_work_category(answers[1]))
    categories = LlmParser.parse_get_categories_answer(answer['content'])

    # Fill the PDF form using the collected answers, the derived categories,
    # and a signature image resolved relative to this script's directory.
    form_data = build_form_data_from_answers(answers, categories, f"{Path(__file__, "..", "signature.png")}")
    write_pdf_form(form_data, Path("signed_form1.pdf"))