import os
import gradio as gr
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
from langchain_community.agent_toolkits.load_tools import load_tools
from langchain.agents import AgentExecutor, create_react_agent
from langchain.prompts import PromptTemplate
import requests

# Read API keys from environment variables
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
SERPER_API_KEY = os.getenv('SERPER_API_KEY')

# Initialize embeddings and language model
gemini_embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
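# Note: gemini_embeddings is initialized but not used elsewhere in this script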
llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro")

# Load tools (the "serpapi" tool expects a SerpAPI key; the SERPER_API_KEY env var is reused to hold it here)
tools = load_tools(["serpapi", "llm-math"], llm=llm, serpapi_api_key=SERPER_API_KEY)

# Define a ReAct-style prompt; the agent's output parser expects the
# Thought / Action / Action Input / Final Answer format below
prompt = PromptTemplate(
    input_variables=["query", "agent_scratchpad", "tools", "tool_names"],
    template=(
        "You are a helpful assistant that answers questions using the provided tools.\n\n"
        "Tools available:\n{tools}\n\n"
        "Use the following format:\n"
        "Question: the input question you must answer\n"
        "Thought: you should always think about what to do\n"
        "Action: the action to take, one of [{tool_names}]\n"
        "Action Input: the input to the action\n"
        "Observation: the result of the action\n"
        "... (this Thought/Action/Action Input/Observation can repeat N times)\n"
        "Thought: I now know the final answer\n"
        "Final Answer: the final answer to the original question\n\n"
        "Question: {query}\n"
        "Thought:{agent_scratchpad}"
    )
)

# Initialize the agent and wrap it in an AgentExecutor so tool calls are actually executed
agent = create_react_agent(tools=tools, llm=llm, prompt=prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)

# Fallback: query SerpAPI directly and return the first organic result's snippet
def google_search(query):
    response = requests.get(
        "https://serpapi.com/search.json",
        params={"q": query, "api_key": SERPER_API_KEY},
    )
    results = response.json()
    # Extract relevant information from the results
    if "organic_results" in results and results["organic_results"]:
        return results["organic_results"][0]["snippet"]  # Snippet of the first result
    return "No results found."

# Function to run the agent, falling back to a direct Google search on failure
def search(query):
    # AgentExecutor injects agent_scratchpad and intermediate_steps itself,
    # and create_react_agent fills in {tools} and {tool_names}, so only the
    # question needs to be passed here
    inputs = {"query": query}

    try:
        # Debug: Print inputs
        print("Inputs to the agent:", inputs)

        # Run the agent; the final answer is returned under the "output" key
        result = agent_executor.invoke(inputs)
        output = result.get("output", "")

        # Debug: Print output
        print("Output from the agent:", output)

        # Check if output is empty or looks like an error
        if not output or "error" in output.lower():
            print("LLM did not generate a valid output, searching Google...")
            output = google_search(query)

        return output

    except Exception as e:
        # Print the exception and the inputs for debugging
        print(f"Error: {e}")
        print("Inputs:", inputs)
        return str(e)

# Create the Gradio interface
iface = gr.Interface(
    fn=search,
    inputs=gr.Textbox(label="Enter your search query", placeholder="What is the hometown of the reigning men's U.S. Open champion?"),
    outputs="text",
    title="Custom Search Engine",
    description="A search engine powered by LangChain and Google Generative AI. Enter your query to get started!",
    theme="default"
)

# Launch the interface with share=True for a public link
iface.launch(share=True)