Commit 3e71e74 · 1 parent: b5667ec
kevin committed: format output with rich ("rich格式化输出")

Files changed:
- core/router.py    +3 -1
- core/utils.py     +6 -2
- requirements.txt  +2 -1
core/router.py
CHANGED
@@ -17,6 +17,7 @@ from core.logger import setup_logger
 from core.models import ChatRequest
 from core.utils import process_streaming_response
 from playsound import playsound  # used to play audio
+from rich import print
 
 logger = setup_logger(__name__)
 router = APIRouter()
@@ -201,7 +202,8 @@ async def chat_completions(
 ):
     logger.info("Entering chat_completions route")
     # logger.info(f"Received request: {request}")
-    logger.info(f"Received request json format: {json.dumps(request.dict(), indent=4)}")
+    # logger.info(f"Received request json format: {json.dumps(request.dict(), indent=4)}")
+    logger.info(f"Received request json format: {json.dumps(request.model_dump())}")
     logger.info(f"App secret: {app_secret}")
     logger.info(f"Received chat completion request for model: {request.model}")
 
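Note on the logging change above: a minimal sketch of the switch from request.dict() to request.model_dump(), assuming ChatRequest is a Pydantic v2 model (where model_dump() supersedes the deprecated dict()); the model class and field values below are illustrative, not the project's actual code.

# Minimal sketch; fields are placeholders, not the real ChatRequest.
import json
from pydantic import BaseModel

class ChatRequest(BaseModel):
    model: str
    stream: bool = False

request = ChatRequest(model="some-model", stream=True)

# Pydantic v2: model_dump() returns a plain dict; dict() is deprecated.
print(json.dumps(request.model_dump()))            # compact, as in the new code
print(json.dumps(request.model_dump(), indent=4))  # the older indented style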
core/utils.py
CHANGED
@@ -16,6 +16,7 @@ from starlette import status
 from core.config import get_settings
 from core.logger import setup_logger
 from core.models import ChatRequest
+from rich import print
 
 settings = get_settings()
 logger = setup_logger(__name__)
@@ -208,8 +209,11 @@ async def process_streaming_response(request: ChatRequest, app_secret: str):
     # "max_tokens": request.max_tokens,
     # "stream": request.stream
     # }
-    print("Request Headers:", json.dumps(request_headers, indent=2))  # formatted output
-    print("Request Body:", json.dumps(request.json(), indent=4, ensure_ascii=False))  # formatted output
+    # print("Request Headers:", json.dumps(request_headers, indent=2))  # formatted output
+    # print("Request Body:", json.dumps(request.json(), indent=4, ensure_ascii=False))  # formatted output
+    print("Request Headers:", json.dumps(request_headers))  # formatted output
+    print("Request Body:", json.dumps(request.json()))  # formatted output
+
     async with client.stream(
         "POST",
         f"https://api.thinkbuddy.ai/v1/chat/completions",
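Note on the print changes above: a minimal sketch of what "from rich import print" does, assuming the rich package; its print() shadows the built-in, highlights JSON-like strings, and can also render dicts directly. The header values here are placeholders.

import json
from rich import print  # shadows the built-in print with rich's highlighted version

request_headers = {"Authorization": "Bearer <token>", "Content-Type": "application/json"}

# As in the new code: dump to a JSON string and let rich highlight it on output.
print("Request Headers:", json.dumps(request_headers))

# rich can also render the dict itself, with colors and indentation.
print(request_headers)

# Caveat (assumption about Pydantic): request.json() already returns a JSON string,
# so json.dumps(request.json()) re-escapes it into a single quoted string; passing
# request.model_dump() (a dict) to json.dumps would avoid the double encoding.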
requirements.txt
CHANGED
@@ -9,4 +9,5 @@ starlette
 uvicorn
 
 playsound
-python-multipart
+python-multipart
+rich
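For reference, a quick sanity check that the newly added dependency is importable after a standard "pip install -r requirements.txt"; this is only a sketch, not part of the repository.

# Quick import check for a few of the listed dependencies (assumes they are installed).
import importlib

for pkg in ("uvicorn", "playsound", "rich"):
    importlib.import_module(pkg)
    print(f"{pkg}: OK")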