import os
import gradio as gr
from dotenv import load_dotenv
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.tools import tool
from langchain.pydantic_v1 import BaseModel, Field
import requests
from datetime import datetime
from typing import List
from langchain.prompts import ChatPromptTemplate
from langchain.output_parsers import PydanticOutputParser
from langchain.memory import ConversationBufferMemory
from langchain.agents import AgentExecutor, create_tool_calling_agent

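# Load API keys (LangChain, Google Gemini, Tomorrow.io) from the local env file.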
load_dotenv(dotenv_path='api.env.txt')
LANGCHAIN_API_KEY = os.getenv('LANGCHAIN_API')
GOOGLE_API_KEY = os.getenv('GOOGLE_API')
WEATHER_API_KEY = os.getenv('WEATHER_API')

os.environ["GOOGLE_API_KEY"] = GOOGLE_API_KEY

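# Gemini chat model used by the agent; temperature=0 for consistent, focused answers.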
llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-flash",
    temperature=0,
    max_tokens=None,
    timeout=None,
    max_retries=2,
)



class WeatherInput(BaseModel):
    city: str = Field(default=None, description="The city to get the weather for.")


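# Resolve the caller's approximate city from their public IP (api.ipify.org + ipapi.co).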
def get_location_from_ip():
    ip = requests.get('https://api.ipify.org').text
    response = requests.get(f"https://ipapi.co/{ip}/json/").json()
    return {
        'city': response.get('city'),
        'latitude': response.get('latitude'),
        'longitude': response.get('longitude')
    }


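# LangChain tool that fetches a multi-day forecast from the Tomorrow.io timelines API.
# return_direct=True sends the formatted forecast text straight back to the user.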
@tool("get_weather_by_location", args_schema=WeatherInput, return_direct=True)
def get_weather_by_location(city: str = None):
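    """Fetch the weather forecast for a city, falling back to the caller's IP-based location if none is given."""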
    if not city:
        location = get_location_from_ip()
        city = location['city']

    url = f"https://api.tomorrow.io/v4/timelines?apikey={WEATHER_API_KEY}"
    payload = {
        "location": city,
        "fields": ["temperature", "humidity", "windSpeed"],
        "units": "metric",
        "timesteps": ["1d"],
        "startTime": "now",
        "endTime": "nowPlus5d",
        "timezone": "auto"
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json"
    }

    response = requests.post(url, json=payload, headers=headers).json()

    return format_weather_response(response, city)


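# Turn the raw Tomorrow.io timeline response into a readable per-day text summary.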
def format_weather_response(weather_data, city):
    intervals = weather_data['data']['timelines'][0]['intervals']
    response = f"Weather forecast for {city}:\n\n"

    for interval in intervals:
        date = datetime.fromisoformat(interval['startTime']).strftime("%A, %B %d")
        temp = round(interval['values']['temperature'], 1)
        humidity = round(interval['values']['humidity'], 1)
        wind_speed = round(interval['values']['windSpeed'], 1)

        response += f"{date}:\n"
        response += f"  Temperature: {temp}°C\n"
        response += f"  Humidity: {humidity}%\n"
        response += f"  Wind Speed: {wind_speed} km/h\n\n"

    return response

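# Pydantic schemas describing a structured multi-day forecast, used by the PydanticOutputParser below.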
class DailyWeather(BaseModel):
    date: str
    temperature: float
    condition: str
    humidity: float
    wind_speed: float
    advice: str

class WeatherOutput(BaseModel):
    location: str = Field(description="The location or the city for which the weather is reported")
    forecast: List[DailyWeather] = Field(description="The weather forecast for multiple days")

parser = PydanticOutputParser(pydantic_object=WeatherOutput)

prompt = ChatPromptTemplate.from_messages([
    ("system", """You are a helpful weather assistant. Your primary function is to provide weather information for cities around the world and offer advice based on the weather conditions. Here are your key responsibilities:

1. If a user asks about the weather in a specific city, use the get_weather_by_location tool to fetch and provide that information for today and the next few days.
2. If a user asks about the weather without specifying a city (e.g., "tell me the weather in my city" or "what is the weather in our city/town"), assume they're asking about their current location. Use the get_weather_by_location tool with an empty string as input to get this information.
3. After getting the weather data, present the information in a user-friendly format and include advice for each day.
4. Based on the weather conditions, provide relevant advice to the user for each day. For example:
   - If it's sunny, suggest outdoor activities or remind them to use sunscreen.
   - If it's rainy, advise them to bring an umbrella or suggest indoor activities.
   - If it's very cold or hot, give appropriate clothing or safety recommendations.
5. If you're unsure about the location or need more information, politely ask the user for clarification.
6. Be prepared to answer follow-up questions about the weather for the rest of the week or for a specific day.

Remember to be friendly and informative in your responses, and focus on providing a full weather forecast when asked. Use the conversation history to provide context-aware responses and avoid repeating information."""),
    ("human", "{input}"),
    ("ai", "Hello! I'd be happy to help you with the weather information for the next few days and provide some helpful advice. What would you like to know?"),
    ("human", "{input}"),
    ("ai", "I understand. Let me fetch that weather information for you and offer some advice based on the conditions."),
    ("placeholder", "{agent_scratchpad}"),
])


# Initialize tools and agent
tools = [get_weather_by_location]

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

agent = create_tool_calling_agent(llm, tools, prompt=prompt)

# Note: AgentExecutor has no output_parser parameter, and the tool already returns
# formatted text (return_direct=True), so no parser is attached here.
agent_executor = AgentExecutor(
    agent=agent,
    tools=tools,
    memory=memory,
)


def gradio_interface(user_input, history):
    """Run the agent on the user's message and append the exchange to the chat history."""
    history = history or []
    result = agent_executor.invoke({"input": user_input})
    history.append((user_input, result["output"]))
    return history, ""


# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# Weather Assistant")
    chatbot = gr.Chatbot()
    with gr.Row():
        txt = gr.Textbox(show_label=False, placeholder="Ask about the weather...", container=False)
    txt.submit(gradio_interface, [txt, chatbot], [chatbot, txt])

demo.launch()