import requests
import asyncio


def get_docs(question: str, top_k: int, encoder, pinecone_index) -> list[str]:
    # encode the query into a dense vector
    xq = encoder([question])
    # search the Pinecone index for the top_k closest matches
    res = pinecone_index.query(vector=xq, top_k=top_k, include_metadata=True)
    # pull the document text out of each match's metadata
    # (assumes the chunk text was indexed under a "text" metadata field)
    docs = [x["metadata"]["text"] for x in res["matches"]]
    return docs
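
# A minimal sketch of how the `encoder` and `pinecone_index` arguments above could be
# wired up. This is an assumption, not part of the original code: it presumes the
# sentence-transformers and pinecone packages, that chunks were indexed with a "text"
# metadata field, and a hypothetical index name "dubai-properties".
#
#     from pinecone import Pinecone
#     from sentence_transformers import SentenceTransformer
#
#     _model = SentenceTransformer("all-MiniLM-L12-v2")
#     encoder = lambda texts: _model.encode(texts)[0].tolist()   # single query vector
#     pinecone_index = Pinecone(api_key="YOUR_PINECONE_API_KEY").Index("dubai-properties")
#
#     docs = get_docs("Average price of a two-bedroom in Dubai Marina?", top_k=3,
#                     encoder=encoder, pinecone_index=pinecone_index)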

def generate(query: str, docs: list[str], groq_client, messages) -> str:
    context = "\n---\n".join(docs)
    system_message = f'''
        You are a real estate assistant that answers questions about properties in Dubai,
        using only the context provided below as your information.
        Format every response according to this schema:
        [ANS]
        ```json
        {{
            "answer": "THE MODEL'S RESPONSE GOES HERE"
        }}```
        [/ANS]
        If the context does not contain the answer to the question,
        ask the user to provide more information.

        CONTEXT:
        {context}
        '''

    # prepend the system prompt so the model sees the retrieved context
    chat_messages = [{"role": "system", "content": system_message}] + messages

    # stream the completion and accumulate the chunks into a single string
    chat_response = groq_client.chat.completions.create(
        model="llama3-70b-8192",
        messages=chat_messages,
        stream=True
    )
    answer = ""
    for chunk in chat_response:
        delta = chunk.choices[0].delta.content
        if delta:
            answer += delta
    return answer
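
# A minimal sketch of one full RAG turn composed from get_docs and generate above;
# assumes a Groq client from the groq package plus the encoder/index sketched earlier,
# with an illustrative API key placeholder.
#
#     from groq import Groq
#     groq_client = Groq(api_key="YOUR_GROQ_API_KEY")
#     user_query = "Which Dubai areas had the most transactions last year?"
#     docs = get_docs(user_query, top_k=5, encoder=encoder, pinecone_index=pinecone_index)
#     messages = [{"role": "user", "content": user_query}]
#     print(generate(user_query, docs, groq_client, messages))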
    
def oracle_db(query: str, top_k: int) -> list[dict]:
    # lazy import so the Oracle driver is only required when this path is used
    import oracledb

    connection = oracledb.connect(user="ai", password="testtest", dsn="91.75.21.131:9522/FREEPDB1")
    cursor = connection.cursor()
    # embed the query inside the database and rank properties by cosine distance
    cursor.execute("""SELECT v.vector_id, prop.*, t.*,
            VECTOR_DISTANCE(v.vector, TO_VECTOR(VECTOR_EMBEDDING(ALL_MINILM_L12_V2 USING :query AS data)), COSINE) AS distance
        FROM
            ai.prop_vectors v
            JOIN ai.dld_property prop ON prop.property_id = v.property_id
            JOIN ai.dld_trans t ON t.prop_id = v.property_id
        ORDER BY distance ASC
        FETCH FIRST :top_k ROWS ONLY""", query=query, top_k=top_k)
    # map each fetched row to a column-name -> value dict
    columns = [col[0] for col in cursor.description]
    cursor.rowfactory = lambda *args: dict(zip(columns, args))
    rows = cursor.fetchall()
    connection.close()
    return rows


async def question_suggestion_api(message: str) -> list:
    questions = []

    # ORDS endpoint that returns follow-up question suggestions
    url = "http://91.75.21.131:9080/ords/ai/rag/question_suggestion"

    # The payload is sent as query parameters, which is what the endpoint expects
    payload = {
        'response': message
    }

    # Run the blocking request in a worker thread so the event loop is not blocked
    response = await asyncio.to_thread(requests.post, url, params=payload)

    # Check if the request was successful
    if response.status_code == 200:
        # Parse the response JSON
        data = response.json()

        # Extract the list of suggested questions
        questions = data.get('expected_responses', [])

        if questions:
            print("Questions:")
            for idx, question in enumerate(questions):
                print(f"{idx}. {question}")
        else:
            print("No questions found in the response.")
    else:
        print(f"Request failed with status code: {response.status_code}")

    return questions




async def send_chatbot_request(question, cohere_api_key) -> str:
    # ORDS endpoint that runs the RAG chatbot
    url = "http://91.75.21.131:9080/ords/ai/rag/chatbot"

    # The payload is sent as query parameters, which is what the endpoint expects
    payload = {
        'question': question,
        'cohere_api_key': cohere_api_key
    }

    try:
        # Run the blocking request in a worker thread so the event loop is not blocked
        response = await asyncio.to_thread(requests.post, url, params=payload, timeout=26)

        # Check if the request was successful
        if response.status_code == 200:
            # Parse the response JSON and pull out the generated text
            data = response.json()
            generations = data.get("generations", [])
            if not generations:
                return "Error: The response did not contain any generations."
            return generations[0].get('text', '')
        else:
            return f"Request failed with status code: {response.status_code}"

    except ValueError:
        return "Error: Unable to parse JSON response."
    except requests.Timeout:
        return "The request timed out. Please try again."
    except requests.RequestException as e:
        return f"An error occurred: {e}"
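

if __name__ == "__main__":
    # A minimal end-to-end demo under assumptions: the Oracle instance and the ORDS
    # endpoints above are reachable, and YOUR_COHERE_API_KEY is replaced with a real key.
    # The sample question is illustrative only.
    sample_question = "What did two-bedroom apartments in Business Bay sell for recently?"

    # direct vector search against the Oracle property/transaction tables
    matches = oracle_db(sample_question, top_k=5)
    print(f"Oracle returned {len(matches)} candidate rows")

    async def _demo() -> None:
        # ask the ORDS chatbot, then request follow-up question suggestions for its answer
        answer = await send_chatbot_request(sample_question, cohere_api_key="YOUR_COHERE_API_KEY")
        print(answer)
        await question_suggestion_api(answer)

    asyncio.run(_demo())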