import gradio as gr
import os
from datetime import date
from langchain import hub
from langchain.agents import AgentExecutor, create_openai_functions_agent, tool
from langchain.chat_models import ChatOpenAI
from langchain_community.tools.tavily_search import TavilySearchResults
from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())
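# Fail fast if the Tavily key is missing; TavilySearchResults reads it from the environment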
TAVILY_API_KEY = os.environ["TAVILY_API_KEY"]
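# Model settings; model_name and temperature are passed to ChatOpenAI below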
config = {
"max_tokens": 1000,
"model_name": "gpt-4",
"temperature": 0,
}
AGENT_OFF = False
AGENT_ON = True
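# Custom LangChain tool the agent can call to get today's date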
@tool
def time(text: str) -> str:
    """Returns today's date. Use this for any questions
    related to knowing today's date. The input should always
    be an empty string, and this function will always return
    today's date - any date mathematics should occur outside
    this function."""
    return str(date.today())
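# Gradio handler: validates the inputs, then answers with either a plain completion or the tool-using agent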
def invoke(openai_api_key, prompt, agent_option):
    if (openai_api_key == ""):
        raise gr.Error("OpenAI API Key is required.")
    if (prompt == ""):
        raise gr.Error("Prompt is required.")
    if (agent_option is None):
        raise gr.Error("Use Agent is required.")
    output = ""
    try:
        llm = ChatOpenAI(model_name = config["model_name"],
                         openai_api_key = openai_api_key,
                         temperature = config["temperature"])
        if (agent_option == AGENT_OFF):
            # Assumed behavior for the non-agent path (original left this as "TODO"):
            # return a plain chat completion without any tools
            output = llm.invoke(prompt).content
        else:
            search = TavilySearchResults()
            # Give the agent both Tavily web search and the custom date tool
            tools = [search, time]
            # Pull the standard OpenAI functions agent prompt from the LangChain hub
            agent_prompt = hub.pull("hwchase17/openai-functions-agent")
            agent = create_openai_functions_agent(llm, tools, agent_prompt)
            agent_executor = AgentExecutor(agent = agent, tools = tools, verbose = True)
            completion = agent_executor.invoke({"input": prompt})
            output = completion["output"]
    except Exception as e:
        raise gr.Error(str(e))
    return output
description = """<a href='https://www.gradio.app/'>Gradio</a> UI using the <a href='https://openai.com/'>OpenAI</a> API
                 with the <a href='https://openai.com/research/gpt-4'>gpt-4</a> model, optionally routed through a
                 LangChain agent with Tavily web search and a date tool."""
gr.close_all()
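# Wire the handler into a simple Gradio interface: API key, prompt, and an agent on/off toggle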
demo = gr.Interface(fn = invoke,
                    inputs = [gr.Textbox(label = "OpenAI API Key", type = "password", lines = 1, value = "sk-"),
                              gr.Textbox(label = "Prompt", lines = 1, value = "What is today's date?"),
                              gr.Radio([AGENT_OFF, AGENT_ON], label = "Use Agent", value = AGENT_OFF)],
                    outputs = [gr.Textbox(label = "Completion", lines = 1)],
                    title = "Generative AI - LLM & Agent",
                    description = description,
                    examples = [["sk-", "What is today's date?", AGENT_ON],
                                ["sk-", "What is the weather in SF?", AGENT_ON]],
                    cache_examples = False)
demo.launch()