File size: 2,933 Bytes
b486b3a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
from fastapi import FastAPI, Form
from langchain_core.runnables.base import RunnableSequence
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
import os
import requests
from dotenv import load_dotenv
from langgraph.checkpoint.memory import MemorySaver
from langgraph.prebuilt import create_react_agent
from langchain_community.tools.tavily_search import TavilySearchResults
load_dotenv()  # pull GROQ_API_KEY / RAPIDAPI_LANG_TRANS / TAVILY key from a local .env

app = FastAPI()

# Groq-hosted chat model used by the agent.
# NOTE(review): "llama-3.1-70b-versatile" may be a deprecated/decommissioned
# Groq model id — confirm against Groq's current model list.
llm = ChatGroq(
    model="llama-3.1-70b-versatile",
    temperature=0,           # deterministic responses
    max_tokens=None,         # no cap on completion length
    timeout=None,            # rely on client defaults; no request timeout set here
    max_retries=5,
    groq_api_key=os.getenv("GROQ_API_KEY")
)

# Tavily web search is the agent's only tool (top 2 results per query).
search = TavilySearchResults(
      max_results=2,
    )
tools = [search]
memory = MemorySaver()  # in-memory checkpointer: per-thread chat history, lost on restart

# ReAct-style agent; conversation state is keyed by the thread_id passed at invoke time.
agent_executor = create_react_agent(llm, tools, checkpointer=memory)

def translate(target, text):
  '''
  Translate the given text into the target language.

  Uses the Microsoft Translator Text API (via RapidAPI). No source language
  is specified, so the service detects it from the input text.

  Parameters:

      target (string): 2 character code to specify the target language.

      text (string): Text to be translated.

  Returns:

      res (string): Translated text.

  Raises:

      requests.HTTPError: if the API responds with a non-2xx status.
      requests.Timeout: if the API does not respond within 30 seconds.
  '''
  url = "https://microsoft-translator-text.p.rapidapi.com/translate"

  querystring = {"api-version":"3.0","profanityAction":"NoAction","textType":"plain", "to":target}

  payload = [{ "Text": text }]
  headers = {
    "x-rapidapi-key": os.getenv("RAPIDAPI_LANG_TRANS"),
    "x-rapidapi-host": "microsoft-translator-text.p.rapidapi.com",
    "Content-Type": "application/json"
  }

  # timeout: requests has NO default timeout, so without one a stalled API
  # call would hang this endpoint forever.
  response = requests.post(url, json=payload, headers=headers, params=querystring, timeout=30)
  # Fail loudly on API errors instead of letting the indexing below raise a
  # confusing KeyError/TypeError on an error payload.
  response.raise_for_status()
  res = response.json()
  return res[0]["translations"][0]["text"]


@app.post('/infer/{user_id}')
def infer(user_id: str, user_input: str = Form(...)):
    '''

        Returns the translated response from the LLM in response to a user query.

        The query is first translated to English, run through the ReAct agent
        (conversation memory is keyed on user_id, so each user keeps an
        independent chat history), and the agent's final answer is translated
        to Urdu before being returned.

        Parameters:

            user_id (string): User ID of a user; used as the checkpointer
                thread_id for per-user conversation memory.

            user_input (string): User query (form field).

        Returns:

            JSON Response (Dictionary): Returns a translated response from the LLM.

    '''

    user_input = translate("en", user_input) # translate user query to english

    prompt = ChatPromptTemplate.from_messages( # define a prompt
        [
            (
                "system",
                "You're a compassionate AI virtual Assistant"
            ),
            ("human", "{user_input}")
        ]
    )

    # prompt | agent_executor is already a runnable chain; the former
    # RunnableSequence(...) wrapper around it was a no-op layer and has been
    # removed — the flattened sequence is identical.
    conversation = prompt | agent_executor

    response = conversation.invoke( # run the chain; thread_id selects this user's chat history
        {"user_input": user_input},
        config={"configurable": {"thread_id":user_id}}
    )

    # The agent returns a state dict; the last message carries the final answer.
    res = translate("ur", response["messages"][-1].content) # translate the response to Urdu

    return {
        "data": res
    }