Update app.py
Browse files
app.py
CHANGED
@@ -23,12 +23,7 @@ app.add_middleware(
|
|
23 |
allow_methods=["*"],
|
24 |
allow_headers=["*"],
|
25 |
)
|
26 |
-
|
27 |
-
"keyless-gpt-4o-mini": "gpt-4o-mini",
|
28 |
-
"keyless-claude-3-haiku": "claude-3-haiku-20240307",
|
29 |
-
"keyless-mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
|
30 |
-
"keyless-meta-Llama-3.1-70B-Instruct-Turbo": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo"
|
31 |
-
}
|
32 |
|
33 |
class ModelInfo(BaseModel):
|
34 |
id: str
|
@@ -194,11 +189,49 @@ async def chat_with_duckduckgo(query: str, model: str, conversation_history: Lis
|
|
194 |
logging.error(f"Unexpected error in chat_with_duckduckgo: {str(e)}")
|
195 |
raise HTTPException(status_code=500, detail=f"Unexpected error: {str(e)}")
|
196 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
197 |
@app.get("/v1/models")
|
198 |
-
async def list_models():
|
199 |
-
|
200 |
-
|
201 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
202 |
|
203 |
@app.post("/v1/chat/completions")
|
204 |
async def chat_completion(request: ChatCompletionRequest):
|
|
|
23 |
allow_methods=["*"],
|
24 |
allow_headers=["*"],
|
25 |
)
|
26 |
+
|
|
|
|
|
|
|
|
|
|
|
27 |
|
28 |
class ModelInfo(BaseModel):
|
29 |
id: str
|
|
|
189 |
logging.error(f"Unexpected error in chat_with_duckduckgo: {str(e)}")
|
190 |
raise HTTPException(status_code=500, detail=f"Unexpected error: {str(e)}")
|
191 |
|
192 |
+
from typing import Any, Dict, List
|
193 |
+
|
194 |
+
# Maps each public "keyless-*" model ID exposed by this API to the
# identifier actually sent to the backend ("backend_model") and the
# vendor string reported as "owned_by" in /v1/models responses.
MODEL_MAPPING: Dict[str, Dict[str, str]] = {
    "keyless-gpt-4o-mini": {
        "backend_model": "gpt-4o-mini",
        "owned_by": "openai"
    },
    "keyless-claude-3-haiku": {
        "backend_model": "claude-3-haiku-20240307",
        "owned_by": "anthropic"
    },
    "keyless-mixtral-8x7b": {
        "backend_model": "mistralai/Mixtral-8x7B-Instruct-v0.1",
        "owned_by": "mistralai"
    },
    "keyless-meta-Llama-3.1-70B-Instruct-Turbo": {
        "backend_model": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
        "owned_by": "meta"
    }
}
|
212 |
+
|
213 |
@app.get("/v1/models")
async def list_models() -> Dict[str, List[Dict[str, Any]]]:
    """
    List available models with their metadata.

    Builds one entry per key in MODEL_MAPPING, mirroring the OpenAI
    /v1/models response shape.

    Returns:
        Dict with a "data" key holding the list of model info objects,
        each carrying id, object, created, owned_by and backend_model.
    """
    # Fixed: the annotation previously used the builtin `any` (a function)
    # instead of `typing.Any`; typing accepts any callable silently, so the
    # bug never raised but misled type checkers.
    # Single timestamp so every entry in one response agrees.
    current_timestamp = int(datetime.now().timestamp())

    models = [
        {
            "id": model_id,
            "object": "model",
            "created": current_timestamp,
            "owned_by": model_info["owned_by"],
            "backend_model": model_info["backend_model"]
        }
        for model_id, model_info in MODEL_MAPPING.items()
    ]

    return {"data": models}
|
235 |
|
236 |
@app.post("/v1/chat/completions")
|
237 |
async def chat_completion(request: ChatCompletionRequest):
|