Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -21,9 +21,11 @@ def safe_rerun():
|
|
21 |
openai_client = None
|
22 |
genai_client = None # Using 'genai_client' to store the configured module/client
|
23 |
deepseek_api_key = None
|
24 |
-
|
|
|
25 |
secret_errors = [] # Store errors for display
|
26 |
|
|
|
27 |
try:
|
28 |
openai_api_key = st.secrets.get("OPENAI_API_KEY")
|
29 |
if openai_api_key:
|
@@ -36,6 +38,7 @@ except KeyError:
|
|
36 |
except Exception as e:
|
37 |
secret_errors.append(f"Error initializing OpenAI client: {e}")
|
38 |
|
|
|
39 |
try:
|
40 |
gemini_api_key = st.secrets.get("GEMINI_API_KEY")
|
41 |
if gemini_api_key:
|
@@ -49,6 +52,7 @@ except KeyError:
|
|
49 |
except Exception as e:
|
50 |
secret_errors.append(f"Error initializing Google GenAI client: {e}")
|
51 |
|
|
|
52 |
try:
|
53 |
deepseek_api_key = st.secrets.get("DEEPSEEK_API_KEY")
|
54 |
if deepseek_api_key:
|
@@ -60,12 +64,25 @@ except KeyError:
|
|
60 |
except Exception as e:
|
61 |
secret_errors.append(f"Error reading DeepSeek API key: {e}")
|
62 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
63 |
# Check if at least one API key is loaded
|
64 |
any_secret_loaded = any(secrets_available.values())
|
65 |
|
66 |
# ---------- Model Configuration ----------
|
67 |
SUPPORTED_MODELS = {}
|
68 |
|
|
|
69 |
if secrets_available["openai"] and openai_client:
|
70 |
SUPPORTED_MODELS.update({
|
71 |
"GPT-4o (OpenAI)": {"id": "gpt-4o", "provider": "openai", "client": openai_client},
|
@@ -77,6 +94,7 @@ if secrets_available["openai"] and openai_client:
|
|
77 |
"ChatGPT o3 (OpenAI)": {"id": "chatgpt-o3", "provider": "openai", "client": openai_client},
|
78 |
})
|
79 |
|
|
|
80 |
if secrets_available["gemini"] and genai_client:
|
81 |
SUPPORTED_MODELS.update({
|
82 |
"Gemini 2.0 (Google)": {"id": "gemini-2.0-latest", "provider": "gemini", "client": genai_client},
|
@@ -86,12 +104,20 @@ if secrets_available["gemini"] and genai_client:
|
|
86 |
"Gemini 1.0 Pro (Google)": {"id": "gemini-1.0-pro", "provider": "gemini", "client": genai_client},
|
87 |
})
|
88 |
|
|
|
89 |
if secrets_available["deepseek"] and deepseek_api_key:
|
90 |
SUPPORTED_MODELS.update({
|
91 |
"DeepSeek Chat": {"id": "deepseek-chat", "provider": "deepseek", "client": None},
|
92 |
"DeepSeek Coder": {"id": "deepseek-coder", "provider": "deepseek", "client": None},
|
93 |
})
|
94 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
95 |
DEFAULT_MODEL_PREFERENCE = [
|
96 |
"GPT-4o Mini (OpenAI)",
|
97 |
"Gemini 1.5 Flash (Google)",
|
@@ -102,7 +128,7 @@ DEFAULT_MODEL = next((m for m in DEFAULT_MODEL_PREFERENCE if m in SUPPORTED_MODE
|
|
102 |
if not DEFAULT_MODEL and SUPPORTED_MODELS:
|
103 |
DEFAULT_MODEL = next(iter(SUPPORTED_MODELS))
|
104 |
|
105 |
-
# ---------- Helper Functions ----------
|
106 |
|
107 |
def _generate_with_openai_provider(client, model_id, prompt, max_tokens):
|
108 |
try:
|
@@ -177,6 +203,35 @@ def _generate_with_deepseek_provider(api_key, model_id, prompt, max_tokens):
|
|
177 |
st.error(f"β DeepSeek Error processing response ({model_id}): {e}")
|
178 |
return f"Error: DeepSeek processing failed for {model_id}. Details: {e}"
|
179 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
180 |
def generate_with_selected_model(selected_model_name, prompt, max_tokens=2000):
|
181 |
if not any_secret_loaded or not SUPPORTED_MODELS:
|
182 |
return "Error: No API keys loaded or models available."
|
@@ -210,6 +265,11 @@ def generate_with_selected_model(selected_model_name, prompt, max_tokens=2000):
|
|
210 |
result = f"Error: DeepSeek API key not available for {selected_model_name}."
|
211 |
else:
|
212 |
result = _generate_with_deepseek_provider(deepseek_api_key, model_id, prompt, max_tokens)
|
|
|
|
|
|
|
|
|
|
|
213 |
else:
|
214 |
st.error(f"Unknown provider '{provider}' configured for model '{selected_model_name}'.")
|
215 |
result = f"Error: Unknown provider {provider}."
|
|
|
21 |
openai_client = None
|
22 |
genai_client = None # Using 'genai_client' to store the configured module/client
|
23 |
deepseek_api_key = None
|
24 |
+
claude_api_key = None
|
25 |
+
secrets_available = {"openai": False, "gemini": False, "deepseek": False, "claude": False}
|
26 |
secret_errors = [] # Store errors for display
|
27 |
|
28 |
+
# OpenAI API Key
|
29 |
try:
|
30 |
openai_api_key = st.secrets.get("OPENAI_API_KEY")
|
31 |
if openai_api_key:
|
|
|
38 |
except Exception as e:
|
39 |
secret_errors.append(f"Error initializing OpenAI client: {e}")
|
40 |
|
41 |
+
# Gemini API Key (Google GenAI)
|
42 |
try:
|
43 |
gemini_api_key = st.secrets.get("GEMINI_API_KEY")
|
44 |
if gemini_api_key:
|
|
|
52 |
except Exception as e:
|
53 |
secret_errors.append(f"Error initializing Google GenAI client: {e}")
|
54 |
|
55 |
+
# DeepSeek API Key
|
56 |
try:
|
57 |
deepseek_api_key = st.secrets.get("DEEPSEEK_API_KEY")
|
58 |
if deepseek_api_key:
|
|
|
64 |
except Exception as e:
|
65 |
secret_errors.append(f"Error reading DeepSeek API key: {e}")
|
66 |
|
67 |
+
# CLAUDE API Key (Anthropic): record availability, collect any lookup problem.
try:
    claude_api_key = st.secrets.get("CLAUDE_API_KEY")
    if not claude_api_key:
        secret_errors.append("Streamlit Secret `CLAUDE_API_KEY` not found.")
    else:
        secrets_available["claude"] = True
except KeyError:
    secret_errors.append("Streamlit Secret `CLAUDE_API_KEY` not found.")
except Exception as e:
    secret_errors.append(f"Error reading CLAUDE API key: {e}")
|
78 |
+
|
79 |
# Check if at least one API key is loaded
|
80 |
any_secret_loaded = any(secrets_available.values())
|
81 |
|
82 |
# ---------- Model Configuration ----------
|
83 |
SUPPORTED_MODELS = {}
|
84 |
|
85 |
+
# OpenAI Models
|
86 |
if secrets_available["openai"] and openai_client:
|
87 |
SUPPORTED_MODELS.update({
|
88 |
"GPT-4o (OpenAI)": {"id": "gpt-4o", "provider": "openai", "client": openai_client},
|
|
|
94 |
"ChatGPT o3 (OpenAI)": {"id": "chatgpt-o3", "provider": "openai", "client": openai_client},
|
95 |
})
|
96 |
|
97 |
+
# Gemini Models (Google)
|
98 |
if secrets_available["gemini"] and genai_client:
|
99 |
SUPPORTED_MODELS.update({
|
100 |
"Gemini 2.0 (Google)": {"id": "gemini-2.0-latest", "provider": "gemini", "client": genai_client},
|
|
|
104 |
"Gemini 1.0 Pro (Google)": {"id": "gemini-1.0-pro", "provider": "gemini", "client": genai_client},
|
105 |
})
|
106 |
|
107 |
+
# DeepSeek Models
|
108 |
if secrets_available["deepseek"] and deepseek_api_key:
|
109 |
SUPPORTED_MODELS.update({
|
110 |
"DeepSeek Chat": {"id": "deepseek-chat", "provider": "deepseek", "client": None},
|
111 |
"DeepSeek Coder": {"id": "deepseek-coder", "provider": "deepseek", "client": None},
|
112 |
})
|
113 |
|
114 |
# Claude Models (Anthropic) — registered only when the CLAUDE_API_KEY secret loaded.
if secrets_available["claude"] and claude_api_key:
    for display_name, claude_model_id in (
        ("Claude 3.7 Sonnet (Anthropic)", "claude-3-7-sonnet-20250219"),
        ("Claude 3.5 Haiku (Anthropic)", "claude-3-5-haiku-20241022"),
    ):
        # No client object is cached for Claude; requests are made per-call with the key.
        SUPPORTED_MODELS[display_name] = {"id": claude_model_id, "provider": "claude", "client": None}
|
120 |
+
|
121 |
DEFAULT_MODEL_PREFERENCE = [
|
122 |
"GPT-4o Mini (OpenAI)",
|
123 |
"Gemini 1.5 Flash (Google)",
|
|
|
128 |
if not DEFAULT_MODEL and SUPPORTED_MODELS:
|
129 |
DEFAULT_MODEL = next(iter(SUPPORTED_MODELS))
|
130 |
|
131 |
+
# ---------- Helper Functions for Generation ----------
|
132 |
|
133 |
def _generate_with_openai_provider(client, model_id, prompt, max_tokens):
|
134 |
try:
|
|
|
203 |
st.error(f"β DeepSeek Error processing response ({model_id}): {e}")
|
204 |
return f"Error: DeepSeek processing failed for {model_id}. Details: {e}"
|
205 |
|
206 |
def _generate_with_claude_provider(api_key, model_id, prompt, max_tokens):
    """Call the Anthropic Messages API and return the generated text.

    Args:
        api_key: Anthropic API key (from the CLAUDE_API_KEY secret).
        model_id: Anthropic model identifier, e.g. "claude-3-7-sonnet-20250219".
        prompt: User prompt, sent as a single user message.
        max_tokens: Upper bound on the number of tokens to generate.

    Returns:
        The model's text reply on success, or an "Error: ..." string on
        failure (failures are also surfaced in the Streamlit UI).
    """
    headers = {
        "Content-Type": "application/json",
        "x-api-key": api_key,
        # Required by Anthropic; requests without this header are rejected.
        "anthropic-version": "2023-06-01",
    }
    # Claude 3.x models are served only by the Messages API; the legacy
    # /v1/complete "Human:/Assistant:" endpoint does not support them.
    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": max_tokens,
        "temperature": 0.7,
    }
    try:
        response = requests.post(
            "https://api.anthropic.com/v1/messages",
            headers=headers,
            json=payload,
            timeout=90,
        )
        response.raise_for_status()
        response_data = response.json()
        # Successful responses carry a list of content blocks; text replies
        # arrive as {"type": "text", "text": ...} entries.
        content = response_data.get("content")
        if isinstance(content, list):
            text_parts = [
                block.get("text", "")
                for block in content
                if isinstance(block, dict) and block.get("type") == "text"
            ]
            if text_parts:
                return "".join(text_parts)
        st.warning(f"Claude returned unexpected response structure: {response_data}")
        return "Error: Claude returned an unexpected structure."
    except requests.exceptions.RequestException as e:
        st.error(f"❌ Claude API Request Error ({model_id}): {e}")
        return f"Error: Claude API request failed for {model_id}. Details: {e}"
    except Exception as e:
        st.error(f"❌ Claude Error processing response ({model_id}): {e}")
        return f"Error: Claude processing failed for {model_id}. Details: {e}"
|
234 |
+
|
235 |
def generate_with_selected_model(selected_model_name, prompt, max_tokens=2000):
|
236 |
if not any_secret_loaded or not SUPPORTED_MODELS:
|
237 |
return "Error: No API keys loaded or models available."
|
|
|
265 |
result = f"Error: DeepSeek API key not available for {selected_model_name}."
|
266 |
else:
|
267 |
result = _generate_with_deepseek_provider(deepseek_api_key, model_id, prompt, max_tokens)
|
268 |
+
elif provider == "claude":
|
269 |
+
if not claude_api_key:
|
270 |
+
result = f"Error: CLAUDE_API_KEY not available for {selected_model_name}."
|
271 |
+
else:
|
272 |
+
result = _generate_with_claude_provider(claude_api_key, model_id, prompt, max_tokens)
|
273 |
else:
|
274 |
st.error(f"Unknown provider '{provider}' configured for model '{selected_model_name}'.")
|
275 |
result = f"Error: Unknown provider {provider}."
|