Pranav0111 committed
Commit 480d3e2 · verified · 1 Parent(s): 67ac5b4

Update chatbot.py

Files changed (1):
  chatbot.py (+63 -69)
chatbot.py CHANGED
@@ -2,23 +2,32 @@ import streamlit as st
 import pandas as pd
 import os
 from datetime import datetime
-import google.generativeai as genai
 
-# Configure Gemini
-GEMINI_API_KEY = os.environ.get('GEMINI_API_KEY')
-genai.configure(api_key=GEMINI_API_KEY)
-model = genai.GenerativeModel('gemini-pro')
+try:
+    import google.generativeai as genai
+    GEMINI_AVAILABLE = True
+except ImportError:
+    GEMINI_AVAILABLE = False
 
-class GeminiDataChatbot:
+class ChatbotManager:
     def __init__(self):
+        if GEMINI_AVAILABLE and 'GEMINI_API_KEY' in os.environ:
+            genai.configure(api_key=os.environ['GEMINI_API_KEY'])
+            self.model = genai.GenerativeModel('gemini-pro')
+        else:
+            self.model = None
+
         if 'uploaded_df' not in st.session_state:
             st.session_state.uploaded_df = None
        if 'chat_history' not in st.session_state:
             st.session_state.chat_history = []
 
-    def render_interface(self):
-        st.title("📊 Data Analysis Chatbot")
-        st.write("Upload your CSV file and ask questions about your data")
+    def render_chat_interface(self):
+        """Render the main chat interface"""
+        st.header("📊 Data Analysis Chatbot")
+
+        if not GEMINI_AVAILABLE:
+            st.warning("Gemini API not available - running in limited mode")
 
         # File upload section
         uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
@@ -31,6 +40,7 @@ class GeminiDataChatbot:
             self._render_chat_window()
 
     def _process_uploaded_file(self, uploaded_file):
+        """Process the uploaded CSV file"""
         try:
             df = pd.read_csv(uploaded_file)
             st.session_state.uploaded_df = df
@@ -39,16 +49,10 @@ class GeminiDataChatbot:
             with st.expander("View Data Preview"):
                 st.dataframe(df.head())
 
-            # Initial analysis prompt
-            initial_prompt = f"""
-            I have uploaded a dataset with {len(df)} rows and {len(df.columns)} columns.
-            Columns: {', '.join(df.columns)}.
-            First give a very brief (2-3 sentence) overview of what this data might contain.
-            Then suggest 3 specific questions I could ask about this data.
-            """
-
-            with st.spinner("Analyzing your data..."):
-                response = self._generate_gemini_response(initial_prompt, df)
+            # Initial analysis
+            if self.model:
+                initial_prompt = f"Briefly describe this dataset with {len(df)} rows and {len(df.columns)} columns."
+                response = self._generate_response(initial_prompt)
                 st.session_state.chat_history.append({
                     "role": "assistant",
                     "content": response
@@ -58,6 +62,7 @@ class GeminiDataChatbot:
             st.error(f"Error processing file: {str(e)}")
 
     def _render_chat_window(self):
+        """Render the chat conversation window"""
         st.subheader("Chat About Your Data")
 
         # Display chat history
@@ -67,55 +72,44 @@ class GeminiDataChatbot:
 
         # User input
         if prompt := st.chat_input("Ask about your data..."):
-            # Add user message to chat history
-            st.session_state.chat_history.append({"role": "user", "content": prompt})
-
-            # Display user message
-            with st.chat_message("user"):
-                st.markdown(prompt)
-
-            # Generate and display assistant response
-            with st.chat_message("assistant"):
-                with st.spinner("Thinking..."):
-                    response = self._generate_gemini_response(prompt, st.session_state.uploaded_df)
-                    st.markdown(response)
-
-            # Add assistant response to chat history
-            st.session_state.chat_history.append({"role": "assistant", "content": response})
+            self._handle_user_input(prompt)
 
-    def _generate_gemini_response(self, prompt: str, df: pd.DataFrame) -> str:
-        """Generate response using Gemini API with data context"""
-        try:
-            # Create data summary for context
-            data_summary = f"""
-            Data Summary:
-            - Shape: {df.shape}
-            - Columns: {', '.join(df.columns)}
-            - First 5 rows:
-            {df.head().to_markdown()}
-            """
-
-            # Create prompt with context
-            full_prompt = f"""
-            You are a data analysis assistant. The user has uploaded a dataset with the following characteristics:
-            {data_summary}
-
-            User Question: {prompt}
-
-            Provide a detailed response answering their question about the data. If appropriate, include:
-            - Relevant statistics
-            - Potential visualizations that would help
-            - Any data quality issues to consider
-            - Business insights if applicable
-            """
-
-            response = model.generate_content(full_prompt)
-            return response.text
+    def _handle_user_input(self, prompt):
+        """Handle user input and generate response"""
+        # Add user message to chat history
+        st.session_state.chat_history.append({"role": "user", "content": prompt})
 
-        except Exception as e:
-            return f"Sorry, I encountered an error processing your request: {str(e)}"
-
-# Initialize and run the chatbot
-if __name__ == "__main__":
-    chatbot = GeminiDataChatbot()
-    chatbot.render_interface()
+        # Display user message
+        with st.chat_message("user"):
+            st.markdown(prompt)
+
+        # Generate and display assistant response
+        with st.chat_message("assistant"):
+            with st.spinner("Thinking..."):
+                response = self._generate_response(prompt)
+                st.markdown(response)
+
+        # Add assistant response to chat history
+        st.session_state.chat_history.append({"role": "assistant", "content": response})
+
+    def _generate_response(self, prompt: str) -> str:
+        """Generate response using available backend"""
+        df = st.session_state.uploaded_df
+
+        if self.model:
+            # Use Gemini if available
+            try:
+                data_summary = f"Data: {len(df)} rows, columns: {', '.join(df.columns)}"
+                full_prompt = f"{data_summary}\n\nUser question: {prompt}"
+                response = self.model.generate_content(full_prompt)
+                return response.text
+            except Exception as e:
+                return f"Gemini error: {str(e)}"
+        else:
+            # Fallback basic analysis
+            if "summary" in prompt.lower():
+                return f"Basic summary:\n{df.describe().to_markdown()}"
+            elif "columns" in prompt.lower():
+                return f"Columns: {', '.join(df.columns)}"
+            else:
+                return "I can provide basic info about columns and summary statistics."