File size: 5,326 Bytes
21c9256
 
 
 
 
 
 
 
7516733
0ac31a1
21c9256
 
 
 
 
 
 
 
 
 
 
 
 
a3f5135
21c9256
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9abf8c6
21c9256
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
from llama_index import SimpleDirectoryReader, GPTListIndex, GPTVectorStoreIndex, LLMPredictor, PromptHelper
from llama_index import StorageContext, load_index_from_storage
from langchain.chat_models import ChatOpenAI
import gradio as gr
import os
import openai
from gradio.themes.utils import colors, fonts, sizes

# Configure the OpenAI client from the environment.
openai.api_key = os.environ.get('openai_key')
# SECURITY FIX: never print the raw secret key — it ends up in logs.
# Only report whether it was found.
print("OpenAI API key loaded:", "yes" if openai.api_key else "NO - set the 'openai_key' env var")

# System prompt shared by every chatbotGPT() turn.
# NOTE: this list is mutated in place by chatbotGPT(), so the conversation
# history grows for the lifetime of the process.
messages = [
    {"role": "system", "content": "follow the 4 instructions below for your outputs:"},
    {"role": "system", "content": "1. make sure all expressions are compatible with Polish"},
    {"role": "system", "content": "2. use Polish only for outputs"},
    {"role": "system", "content": "3. if you cannot answer, reply that you do not have enough information"},
    # BUG FIX: instruction previously read "if you do know the answer",
    # which inverted the intended anti-hallucination rule.
    {"role": "system", "content": "4. do not make up any answer if you do not know the answer"},
]

def construct_index(directory_path):
    """Build a GPT vector index over the documents in *directory_path*.

    The index is persisted under 'index.json' (a directory, despite the
    name) so chatbotCustom() can reload it via StorageContext.

    :param directory_path: folder whose files are read by SimpleDirectoryReader.
    :return: the constructed GPTVectorStoreIndex.
    """
    max_input_size = 4096    # model context window for gpt-3.5-turbo
    num_outputs = 512        # cap on completion tokens
    max_chunk_overlap = 20
    chunk_size_limit = 1000
    temperature = 0.1        # near-deterministic answers

    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=temperature, model_name="gpt-3.5-turbo", max_tokens=num_outputs))

    # Only local files are indexed here; SimpleDirectoryReader does not fetch URLs.
    documents = SimpleDirectoryReader(directory_path).load_data()

    # BUG FIX: these pages were previously passed as a 'urls=[...]' keyword to
    # from_documents(), which is not a parameter of that API, so they were
    # never downloaded or indexed. Kept here so they can be loaded properly
    # (e.g. with a web page reader) and appended to `documents` in the future.
    _site_urls = [
        'https://trio.house/',
        'https://trio.house/kontakt/',
        'https://trio.house/o-nas/',
        'https://trio.house/zainwestuj-z-nami/',
        'https://trio.house/potrzebujesz-konsultacji-rynku-nieruchomosci/',
        'https://trio.house/potrzebujesz-remontu/',
        'https://trio.house/potrzebujesz-projektu-wnetrza/',
        'https://trio.house/potrzebujesz-mebli-na-wymiar/',
        'https://trio.house/potrzebujesz-kredytu-na-zakup-nieruchomosci/',
        'https://trio.house/makroekonomia/',
        'https://trio.house/rynek-nieruchomosci/',
        'https://trio.house/2023/05/24/deweloperzy-buduja-coraz-mniej/',
        'https://trio.house/2023/04/27/prognozy-na-2023-2025-co-nas-czeka/',
        'https://trio.house/2023/04/18/wycinka-drzew-na-wlasnej-dzialce-w-2023/',
        'https://trio.house/2023/04/03/lipiec-rozpoczynamy-juz-w-kwietniu/',
        'https://trio.house/2023/04/03/zmiany-w-podatku-od-czynnosci-cywilnoprawnych/',
        'https://trio.house/2023/03/23/czy-aby-napewno-najdrozsze-mieszkania-sa-w-stolicy/',
    ]

    index = GPTVectorStoreIndex.from_documents(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    index.storage_context.persist('index.json')

    return index

def chatbotCustom(input):
    """Answer *input* using the vector index persisted under 'index.json'.

    Reloads the index from disk on every call and returns the plain-text
    answer produced by the default query engine.
    """
    ctx = StorageContext.from_defaults(persist_dir="index.json")
    loaded_index = load_index_from_storage(ctx)
    engine = loaded_index.as_query_engine()
    result = engine.query(input)
    return result.response

def chatbotGPT(input):
    """Run one conversation turn against gpt-3.5-turbo.

    Appends the user message and the assistant reply to the module-level
    `messages` history, and returns the reply text. Returns None when
    *input* is empty/falsy (nothing is sent to the API in that case).
    """
    if not input:
        return None
    messages.append({"role": "user", "content": input})
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo", messages=messages
    )
    answer = completion.choices[0].message.content
    messages.append({"role": "assistant", "content": answer})
    return answer

def clear():
    """Reset both Gradio textboxes (input and output) to empty."""
    return (None, None)

# Custom Gradio theme: neutral hues with the trio.house brand colors.
theme = gr.themes.Default(
    font=[gr.themes.GoogleFont("Roboto"), "sans-serif", "sans-serif"],
    primary_hue="neutral",
    secondary_hue="neutral",
    neutral_hue="neutral",
).set(
    button_primary_background_fill="#3FCCA5",       # brand green
    button_primary_background_fill_dark="#3FCCA5",
    button_primary_text_color="#003F62",            # brand navy
    # BUG FIX: the '#' prefix was missing ("FFFFFF"), which is not a valid
    # CSS color value and is inconsistent with the other colors above.
    body_background_fill="#FFFFFF",
    body_background_fill_dark="#FFFFFF",
)

# Assemble the Gradio UI: question box, answer box, send/clear buttons.
with gr.Blocks(theme=theme) as trioGPT:
    inputs = gr.Textbox(lines=4, elem_id="inputs", label="Porozmawiaj z naszym ChatBotem TrioGPT")
    outputs = gr.Textbox(label="Odpowiedź", elem_id="outputs")
    with gr.Row():
        submit_btn = gr.Button("Wyślij", variant="primary")
        clear_btn = gr.Button("Wyczyść")

    # Questions are answered from the local vector index, not raw ChatGPT.
    submit_btn.click(chatbotCustom, inputs=inputs, outputs=outputs)
    clear_btn.click(fn=clear, inputs=None, outputs=[inputs, outputs])

# FIX: only build the index when no persisted copy exists. The previous
# code re-embedded the whole corpus (paid OpenAI API calls) on every
# process start even though chatbotCustom() reloads the persisted index
# from disk. Delete the 'index.json' directory to force a rebuild.
if not os.path.exists('index.json'):
    index = construct_index("data")

trioGPT.launch()  # pass share=True for a public link