Spaces:
Sleeping
Sleeping
File size: 7,058 Bytes
66ac839 f4f5e89 cca16c9 6de6c8c f4f5e89 cca16c9 f4f5e89 cca16c9 f4f5e89 cca16c9 f4f5e89 cca16c9 f4f5e89 6de6c8c f4f5e89 6de6c8c f4f5e89 6de6c8c 1a64a9e cca16c9 6de6c8c e448a2e cca16c9 6de6c8c 1a64a9e 6de6c8c cca16c9 6de6c8c f4f5e89 6de6c8c f4f5e89 6de6c8c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 |
import gradio as gr
import anthropic
import json
import logging
from tool_handler import process_tool_call, tools
from config import SYSTEM_PROMPT, API_KEY, MODEL_NAME
from datasets import load_dataset
import pandas as pd
from dotenv import load_dotenv
# Load environment variables from a local .env file (API keys etc.)
load_dotenv()
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# Initialize Anthropic client with API key
client = anthropic.Client(api_key=API_KEY)
def simple_chat(user_message, history):
    """Run one chat turn against the Anthropic Messages API, resolving tool calls.

    Args:
        user_message: The new user utterance from the textbox.
        history: List of (user_msg, assistant_msg) pairs from the Chatbot widget.

    Returns:
        (history, "", messages): the updated history, an empty string to clear
        the input textbox, and the raw message list for analysis/display.
    """
    # Reconstruct the alternating user/assistant message list from UI history
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": user_message})
    full_response = ""
    MAX_ITERATIONS = 5  # cap tool-use round trips so a misbehaving loop terminates
    iteration_count = 0
    while iteration_count < MAX_ITERATIONS:
        try:
            # default=str keeps logging robust even if non-JSON values slip in
            logger.info("Sending messages to LLM API: %s", json.dumps(messages, indent=2, default=str))
            response = client.messages.create(
                model=MODEL_NAME,
                system=SYSTEM_PROMPT,
                max_tokens=4096,
                tools=tools,
                messages=messages,
            )
            logger.info("LLM API response: %s", json.dumps(response.to_dict(), indent=2))
            # Collect only the text blocks. When the model requests a tool,
            # content[0] can be a tool_use block with no .text attribute, so
            # the previous `response.content[0].text` would raise.
            assistant_message = "".join(
                block.text
                for block in response.content
                if getattr(block, "type", None) == "text"
            )
            if response.stop_reason == "tool_use":
                tool_use = response.content[-1]
                tool_result = process_tool_call(tool_use.name, tool_use.input)
                # Echo the assistant turn verbatim (including the tool_use
                # block) so the API can correlate the upcoming tool_result.
                # to_dict() gives plain JSON-serializable dicts.
                messages.append({"role": "assistant", "content": response.to_dict()["content"]})
                # Tool results must be a tool_result content block on a user
                # message, not a JSON-encoded string.
                messages.append({
                    "role": "user",
                    "content": [{
                        "type": "tool_result",
                        "tool_use_id": tool_use.id,
                        "content": str(tool_result),
                    }],
                })
                full_response += f"\nUsing tool: {tool_use.name}\n"
                iteration_count += 1
                continue
            else:
                # Final (non-tool) answer: record it and stop looping
                full_response += assistant_message
                messages.append({"role": "assistant", "content": assistant_message})
                break
        except anthropic.BadRequestError as e:
            logger.error(f"BadRequestError: {str(e)}")
            full_response = f"Error: {str(e)}"
            break
        except Exception as e:
            logger.error(f"Unexpected error: {str(e)}")
            full_response = f"An unexpected error occurred: {str(e)}"
            break
    logger.info("Final messages: %s", json.dumps(messages, indent=2, default=str))
    if iteration_count == MAX_ITERATIONS:
        logger.warning("Maximum iterations reached in simple_chat")
    history.append((user_message, full_response))
    return history, "", messages  # Return messages as well
def messages_to_dataframe(messages):
    """Flatten a chat message list into a DataFrame for display.

    Each message becomes one row with columns role / content / tool_use /
    tool_result. Non-string content is JSON-encoded; tool_use and
    tool_result items found inside an assistant content list are also
    extracted into their own columns.
    """
    rows = []
    for message in messages:
        content = message['content']
        record = {
            'role': message['role'],
            'content': content if isinstance(content, str) else json.dumps(content),
            'tool_use': None,
            'tool_result': None,
        }
        if message['role'] == 'assistant' and isinstance(content, list):
            for entry in content:
                kind = entry.get('type') if isinstance(entry, dict) else None
                if kind == 'tool_use':
                    record['tool_use'] = json.dumps(entry)
                elif kind == 'tool_result':
                    record['tool_result'] = json.dumps(entry)
        rows.append(record)
    return pd.DataFrame(rows)
def submit_message(message, history):
    """Chat submit handler: run one chat turn and build the analysis table.

    Returns the updated history, an empty string to clear the textbox, and a
    DataFrame view of the full message log.
    """
    updated_history, _, message_log = simple_chat(message, history)
    analysis_df = messages_to_dataframe(message_log)
    print(analysis_df)  # mirror the analysis table to the console
    return updated_history, "", analysis_df
def load_customers_dataset():
    """Download the blackbird-customers HF dataset (train split) as a DataFrame."""
    customers = load_dataset("dwb2023/blackbird-customers", split="train")
    return pd.DataFrame(customers)
def load_orders_dataset():
    """Download the blackbird-orders HF dataset (train split) as a DataFrame."""
    orders = load_dataset("dwb2023/blackbird-orders", split="train")
    return pd.DataFrame(orders)
# Sample prompts shown in the Gradio Examples widget. The lookup values
# (email, phone number, username, order id) presumably match rows in the
# blackbird-customers / blackbird-orders datasets shown in the other tabs —
# verify against those tabs if the examples stop resolving.
example_inputs = [
    "Can you confirm my username? My email is [email protected].",
    "Can you send me a list of my recent orders? My phone number is 222-333-4444.",
    "I need to confirm my current user info and order status. My username is liamn.",
    "I'm checking on the status of an order, the order id is 74651.",
    "I need to cancel Order ID...",
    "I lost my phone and need to update my contact information. My user id is...",
]
# Create Gradio App: a chat tab wired to simple_chat/submit_message plus two
# read-only dataset tabs the examples reference.
app = gr.Blocks(theme="sudeepshouche/minimalist")
with app:
    with gr.Tab("Chatbot"):
        gr.Markdown("# πΎ Scooby Snacks -- Customer Support Chat πͺ")
        gr.Markdown("## π Leveraging **Claude Sonnet 3.5** for Microservice-Based Function Calling π§")
        # Fixed typo: "runing" -> "running"
        gr.Markdown("FastAPI Backend - running on Docker: [blackbird-svc](https://huggingface.co/spaces/dwb2023/blackbird-svc)")
        gr.Markdown("Data Sources - HF Datasets: [blackbird-customers](https://huggingface.co/datasets/dwb2023/blackbird-customers) [blackbird-orders](https://huggingface.co/datasets/dwb2023/blackbird-orders)")
        with gr.Row():
            with gr.Column():
                msg = gr.Textbox(label="Your message")
                gr.Markdown("β¬οΈ checkout the *Customers* and *Orders* tabs above π for sample email addresses, order ids, etc.")
                examples = gr.Examples(
                    examples=example_inputs,
                    inputs=msg
                )
                submit = gr.Button("Submit", variant="primary")
                clear = gr.Button("Clear", variant="secondary")
            with gr.Column():
                chatbot = gr.Chatbot()
                df_output = gr.Dataframe(label="Conversation Analysis")
        # Thin wrapper so Enter-to-submit and the button share one handler.
        def handle_submit(message, history):
            return submit_message(message, history)
        # NOTE(review): submit_message already returns "" for the msg output,
        # so the trailing .then(lambda: "", ...) clear is redundant but
        # harmless; kept to preserve existing behavior.
        submit_event = msg.submit(handle_submit, [msg, chatbot], [chatbot, msg, df_output]).then(
            lambda: "", None, msg
        )
        submit.click(submit_message, [msg, chatbot], [chatbot, msg, df_output], show_progress="full").then(
            lambda: "", None, msg
        )
        # Clear button resets only the chatbot display, not the message log.
        clear.click(lambda: None, None, chatbot, queue=False)
    with gr.Tab("Customers"):
        customers_df = gr.Dataframe(load_customers_dataset(), label="Customers Data")
    with gr.Tab("Orders"):
        orders_df = gr.Dataframe(load_orders_dataset(), label="Orders Data")
if __name__ == "__main__":
    app.launch()
|