Niansuh committed on
Commit
923acec
·
verified ·
1 Parent(s): 4990ab8

Delete api

Browse files
Files changed (9) hide show
  1. api/__init__.py +0 -0
  2. api/app.py +0 -42
  3. api/auth.py +0 -12
  4. api/config.py +0 -8
  5. api/logger.py +0 -22
  6. api/models.py +0 -16
  7. api/providers.py +0 -159
  8. api/routes.py +0 -69
  9. api/utils.py +0 -90
api/__init__.py DELETED
File without changes
api/app.py DELETED
@@ -1,42 +0,0 @@
1
# api/app.py

from fastapi import FastAPI, Request
from starlette.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from api.logger import setup_logger
from api.routes import router

logger = setup_logger(__name__)

def create_app():
    """Build and configure the FastAPI application for the gateway.

    Returns a FastAPI instance with docs endpoints disabled, permissive
    CORS, the API router mounted, and a catch-all exception handler.
    """
    application = FastAPI(
        title="AmigoChat API Gateway",
        docs_url=None,      # Disable Swagger UI
        redoc_url=None,     # Disable ReDoc
        openapi_url=None,   # Disable OpenAPI schema
    )

    # Permissive CORS: any origin/method/header is accepted.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],  # Adjust as needed for security
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Mount every API route defined in api/routes.py.
    application.include_router(router)

    @application.exception_handler(Exception)
    async def global_exception_handler(request: Request, exc: Exception):
        # Log the failure but hide internals behind a generic 500 payload.
        logger.error(f"An error occurred: {str(exc)}")
        return JSONResponse(
            status_code=500,
            content={"message": "An internal server error occurred."},
        )

    return application

app = create_app()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/auth.py DELETED
@@ -1,12 +0,0 @@
1
# api/auth.py

from fastapi import Depends, HTTPException
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from api.config import APP_SECRET

security = HTTPBearer()

def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
    """Validate the bearer token against APP_SECRET.

    Returns the token when it matches; otherwise raises a 403.
    """
    token = credentials.credentials
    if token == APP_SECRET:
        return token
    raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
 
 
 
 
 
 
 
 
 
 
 
 
 
api/config.py DELETED
@@ -1,8 +0,0 @@
1
# api/config.py

import os
from dotenv import load_dotenv

# Load variables from a local .env file into the process environment
# before any setting is read.
load_dotenv()

# Shared secret checked by api.auth.verify_app_secret.
# NOTE(review): os.getenv returns None when APP_SECRET is unset, which
# would make every auth check fail — confirm deployment always sets it.
APP_SECRET = os.getenv("APP_SECRET")
 
 
 
 
 
 
 
 
 
api/logger.py DELETED
@@ -1,22 +0,0 @@
1
# api/logger.py

import logging

def setup_logger(name):
    """Return a console-logging logger for *name*.

    Configuration is applied only once per logger name: if the logger
    already carries handlers it is returned untouched, so repeated calls
    never duplicate output.
    """
    log = logging.getLogger(name)
    if log.handlers:
        return log

    log.setLevel(logging.INFO)
    fmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # Single stderr/console handler.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(fmt)
    log.addHandler(stream_handler)

    # File Handler - Error Level (Optional)
    # error_file_handler = logging.FileHandler('error.log')
    # error_file_handler.setFormatter(fmt)
    # error_file_handler.setLevel(logging.ERROR)
    # log.addHandler(error_file_handler)

    return log
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/models.py DELETED
@@ -1,16 +0,0 @@
1
# api/models.py

from typing import List, Optional
from pydantic import BaseModel

class Message(BaseModel):
    # One chat turn. Values mirror the OpenAI-style schema this gateway
    # exposes — presumably role is "system"/"user"/"assistant"; confirm
    # against callers.
    role: str
    content: str

class ChatRequest(BaseModel):
    # OpenAI-compatible chat completion request body, parsed by the
    # /v1/chat/completions route.
    model: str
    messages: List[Message]
    # Sampling defaults below are local defaults only; api.providers
    # sends its own fixed values upstream.
    stream: Optional[bool] = False
    temperature: Optional[float] = 0.5
    top_p: Optional[float] = 0.95
    max_tokens: Optional[int] = 4000
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/providers.py DELETED
@@ -1,159 +0,0 @@
1
# api/providers.py

from __future__ import annotations

import json
import uuid
from aiohttp import ClientSession, ClientTimeout
from typing import AsyncGenerator, List, Dict, Any, Union

from api.logger import setup_logger

# Module-level logger shared by the provider implementation below.
logger = setup_logger(__name__)
13
-
14
class AmigoChat:
    """Async client for the AmigoChat OpenAI-compatible chat API.

    Exposes model/alias metadata plus :meth:`generate_response`, which
    returns either the full completion text (non-streaming) or an async
    generator of content deltas (streaming).

    BUG FIX vs. previous revision: the streaming generator used to be
    created *inside* the ``async with ClientSession``/``session.post``
    blocks and returned to the caller — both contexts then exited,
    closing the session and response before the caller ever iterated.
    The generator now owns the session/response for its whole lifetime.
    """

    url = "https://amigochat.io"
    chat_api_endpoint = "https://api.amigochat.io/v1/chat/completions"
    image_api_endpoint = "https://api.amigochat.io/v1/images/generations"
    default_model = 'gpt-4o-mini'

    chat_models = [
        'gpt-4o',
        default_model,
        'o1-preview',
        'o1-mini',
        'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
        'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
        'claude-3-sonnet-20240229',
        'gemini-1.5-pro',
    ]

    image_models = [
        'flux-pro/v1.1',
        'flux-realism',
        'flux-pro',
        'dalle-e-3',
    ]

    models = chat_models + image_models

    # Public short names -> upstream model identifiers.
    model_aliases = {
        "o1": "o1-preview",
        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
        "llama-3.2-90b": "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
        "claude-3.5-sonnet": "claude-3-sonnet-20240229",
        "gemini-pro": "gemini-1.5-pro",
        "flux-pro": "flux-pro/v1.1",
        "dalle-3": "dalle-e-3",
    }

    # Upstream "personaId" value required by the AmigoChat API per model.
    persona_ids = {
        'gpt-4o': "gpt",
        'gpt-4o-mini': "amigo",
        'o1-preview': "openai-o-one",
        'o1-mini': "openai-o-one-mini",
        'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo': "llama-three-point-one",
        'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo': "llama-3-2",
        'claude-3-sonnet-20240229': "claude",
        'gemini-1.5-pro': "gemini-1-5-pro",
        'flux-pro/v1.1': "flux-1-1-pro",
        'flux-realism': "flux-realism",
        'flux-pro': "flux-pro",
        'dalle-e-3': "dalle-three",
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve *model* through aliases; unknown names fall back to ``default_model``."""
        return cls.model_aliases.get(model, model if model in cls.models else cls.default_model)

    @classmethod
    def get_persona_id(cls, model: str) -> str:
        """Persona id for *model*, defaulting to the generic "amigo" persona."""
        return cls.persona_ids.get(model, "amigo")

    @classmethod
    def _build_headers(cls) -> Dict[str, str]:
        """Browser-like request headers with a fresh device UUID per request."""
        return {
            "accept": "*/*",
            "accept-language": "en-US,en;q=0.9",
            "authorization": "Bearer",
            "cache-control": "no-cache",
            "content-type": "application/json",
            "origin": cls.url,
            "pragma": "no-cache",
            "priority": "u=1, i",
            "referer": f"{cls.url}/",
            "sec-ch-ua": '"Chromium";v="129", "Not=A?Brand";v="8"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Linux"',
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
            "x-device-language": "en-US",
            "x-device-platform": "web",
            "x-device-uuid": str(uuid.uuid4()),
            "x-device-version": "1.0.32"
        }

    @classmethod
    def _build_payload(cls, model: str, messages: List[Dict[str, Any]], stream: bool) -> Dict[str, Any]:
        """Chat-completion request body with the API's fixed sampling defaults."""
        return {
            "messages": messages,
            "model": model,
            "personaId": cls.get_persona_id(model),
            "frequency_penalty": 0,
            "max_tokens": 4000,
            "presence_penalty": 0,
            "stream": stream,
            "temperature": 0.5,
            "top_p": 0.95
        }

    @staticmethod
    def _delta_from_chunk(chunk: Dict[str, Any]):
        """Extract the text delta from one parsed SSE chunk, or None."""
        choices = chunk.get('choices')
        if not choices:
            return None
        choice = choices[0]
        if 'delta' in choice:
            return choice['delta'].get('content')
        if 'text' in choice:
            return choice['text']
        return None

    @classmethod
    async def _stream_chat(cls, data: Dict[str, Any], proxy: str = None) -> AsyncGenerator[str, None]:
        """Async generator yielding content deltas from the SSE stream.

        The session and response are opened here, inside the generator,
        so the connection stays alive for as long as the caller iterates.
        """
        timeout = ClientTimeout(total=300)
        try:
            async with ClientSession(headers=cls._build_headers()) as session:
                async with session.post(cls.chat_api_endpoint, json=data, proxy=proxy, timeout=timeout) as response:
                    if response.status not in (200, 201):
                        error_text = await response.text()
                        raise Exception(f"Error {response.status}: {error_text}")
                    async for raw in response.content:
                        line = raw.decode('utf-8').strip()
                        if not line.startswith('data: '):
                            continue
                        if line == 'data: [DONE]':
                            break
                        try:
                            chunk = json.loads(line[6:])
                        except json.JSONDecodeError:
                            # Skip malformed keep-alive/partial lines.
                            continue
                        content = cls._delta_from_chunk(chunk)
                        if content:
                            yield content
        except Exception as e:
            logger.error(f"Error during request: {e}")
            raise

    @classmethod
    async def _complete_chat(cls, data: Dict[str, Any], proxy: str = None) -> str:
        """Non-streaming completion: return the assistant content ("" if absent)."""
        timeout = ClientTimeout(total=300)
        try:
            async with ClientSession(headers=cls._build_headers()) as session:
                async with session.post(cls.chat_api_endpoint, json=data, proxy=proxy, timeout=timeout) as response:
                    if response.status not in (200, 201):
                        error_text = await response.text()
                        raise Exception(f"Error {response.status}: {error_text}")
                    response_data = await response.json()
                    if 'choices' in response_data and len(response_data['choices']) > 0:
                        return response_data['choices'][0]['message']['content']
                    return ""
        except Exception as e:
            logger.error(f"Error during request: {e}")
            raise

    @classmethod
    async def generate_response(
        cls,
        model: str,
        messages: List[Dict[str, Any]],
        stream: bool = False,
        proxy: str = None,
    ) -> Union[AsyncGenerator[str, None], str]:
        """Run a chat completion against AmigoChat.

        Args:
            model: Model name or alias; unknown names fall back to the default.
            messages: OpenAI-style message dicts (role/content).
            stream: When True, return an async generator of content deltas.
            proxy: Optional proxy URL passed through to aiohttp.

        Returns:
            The full completion text, an async generator of deltas, or
            None for image models (not implemented; preserves previous
            no-op behavior).
        """
        model = cls.get_model(model)
        if model not in cls.chat_models:
            # Handle image models or other cases if necessary.
            return None
        data = cls._build_payload(model, messages, stream)
        if stream:
            # Return (do not await) the generator; it manages its own session.
            return cls._stream_chat(data, proxy)
        return await cls._complete_chat(data, proxy)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/routes.py DELETED
@@ -1,69 +0,0 @@
1
# api/routes.py

import json
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import StreamingResponse
from api.auth import verify_app_secret
from api.models import ChatRequest
from api.utils import process_non_streaming_response, process_streaming_response
from api.providers import AmigoChat
from api.logger import setup_logger

logger = setup_logger(__name__)

router = APIRouter()

@router.options("/v1/chat/completions")
@router.options("/api/v1/chat/completions")
async def chat_completions_options():
    """Answer CORS preflight requests for the chat completion endpoint."""
    preflight_headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type, Authorization",
    }
    return Response(status_code=200, headers=preflight_headers)

@router.get("/v1/models")
@router.get("/api/v1/models")
async def list_models():
    """List every model the gateway exposes, OpenAI-style."""
    return {
        "object": "list",
        "data": [{"id": model, "name": model} for model in AmigoChat.models],
    }

@router.post("/v1/chat/completions")
@router.post("/api/v1/chat/completions")
async def chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    """Handle an OpenAI-compatible chat completion, streaming or not.

    Raises a 400 for models outside the allowed set (aliases included).
    """
    logger.info("Entering chat_completions route")
    logger.info(f"Processing chat completion request for model: {request.model}")

    if request.model not in AmigoChat.models + list(AmigoChat.model_aliases.keys()):
        raise HTTPException(
            status_code=400,
            detail=f"Model {request.model} is not allowed. Allowed models are: {', '.join(AmigoChat.models)}",
        )

    if not request.stream:
        logger.info("Non-streaming response")
        payload = await process_non_streaming_response(request)
        return Response(content=json.dumps(payload), media_type="application/json")

    logger.info("Streaming response")
    return StreamingResponse(
        process_streaming_response(request),
        media_type="text/event-stream",
        headers={"Cache-Control": "no-cache"},
    )

@router.get('/')
@router.get('/healthz')
@router.get('/ready')
@router.get('/alive')
@router.get('/status')
@router.get("/health")
def health_check(request: Request):
    """Liveness/readiness probe; always reports ok."""
    return Response(content=json.dumps({"status": "ok"}), media_type="application/json")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
api/utils.py DELETED
@@ -1,90 +0,0 @@
1
# api/utils.py

from datetime import datetime
import json
import uuid
from typing import Any, Dict, Optional, AsyncGenerator, List

from fastapi import HTTPException
from api.models import ChatRequest, Message
from api.logger import setup_logger
from api.providers import AmigoChat

# Module-level logger shared by the response helpers below.
logger = setup_logger(__name__)
14
-
15
async def process_streaming_response(request: ChatRequest) -> AsyncGenerator[str, None]:
    """Yield OpenAI-style SSE chunks for a streaming chat completion.

    Args:
        request: Parsed chat completion request (model, messages, options).

    Yields:
        ``data: {json}\\n\\n`` lines ending with a stop chunk and
        ``data: [DONE]``.

    Raises:
        HTTPException: 500 wrapping any provider failure.
    """
    logger.info("Processing streaming response with AmigoChat")
    messages = [msg.dict() for msg in request.messages]

    # One completion id / creation time shared by every chunk, per the
    # OpenAI streaming format. (Previously a fresh uuid and timestamp
    # were generated per chunk, which breaks clients that correlate
    # chunks by id.)
    completion_id = f"chatcmpl-{uuid.uuid4()}"
    created = int(datetime.now().timestamp())

    def _chunk(delta: Dict[str, Any], finish_reason: Optional[str]) -> Dict[str, Any]:
        # Shared scaffold for every SSE chunk of this completion.
        return {
            "id": completion_id,
            "object": "chat.completion.chunk",
            "created": created,
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "delta": delta,
                    "finish_reason": finish_reason,
                }
            ],
        }

    try:
        async for content in await AmigoChat.generate_response(
            model=request.model,
            messages=messages,
            stream=True
        ):
            yield f"data: {json.dumps(_chunk({'content': content}, None))}\n\n"

        # Terminal chunk plus sentinel to close the stream.
        yield f"data: {json.dumps(_chunk({}, 'stop'))}\n\n"
        yield "data: [DONE]\n\n"

    except Exception as e:
        logger.error(f"Error in streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))
61
-
62
async def process_non_streaming_response(request: ChatRequest):
    """Return a complete OpenAI-style ``chat.completion`` payload.

    Raises:
        HTTPException: 500 wrapping any provider failure.
    """
    logger.info("Processing non-streaming response with AmigoChat")
    payload_messages = [msg.dict() for msg in request.messages]

    try:
        reply = await AmigoChat.generate_response(
            model=request.model,
            messages=payload_messages,
            stream=False
        )
    except Exception as e:
        logger.error(f"Error in non-streaming response: {e}")
        raise HTTPException(status_code=500, detail=str(e))

    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion",
        "created": int(datetime.now().timestamp()),
        "model": request.model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": reply},
                "finish_reason": "stop",
            }
        ],
        "usage": None,
    }