Optimize log display
- main.py +8 -13
- response.py +4 -4
- test/provider_test.py +6 -2
main.py
CHANGED
@@ -105,9 +105,14 @@ class ModelRequestHandler:
                         models_list = provider['model'].keys()
                         if (model and model_name in models_list) or (model == "*" and model_name in models_list):
                             provider_rules.append(provider_name)
+            else:
+                for provider in config['providers']:
+                    if model in provider['model'].keys():
+                        provider_rules.append(provider['provider'])
+
         provider_list = []
         for provider in config['providers']:
-            if model_name in provider['model'].keys() and (
+            if model_name in provider['model'].keys() and (provider_rules and provider['provider'] in provider_rules):
                 provider_list.append(provider)
         return provider_list
 
@@ -115,7 +120,6 @@
         model_name = request.model
         matching_providers = self.get_matching_providers(model_name, token)
         # print("matching_providers", json.dumps(matching_providers, indent=4, ensure_ascii=False))
-
         if not matching_providers:
             raise HTTPException(status_code=404, detail="No matching model found")
 
@@ -136,19 +140,10 @@
             try:
                 response = await process_request(request, provider)
                 return response
-            except HTTPException as e:
-                print('\033[31m')
-                print(f"Error with provider {provider['provider']}: {str(e)}")
-                traceback.print_exc()
-                print('\033[0m')
-                if use_round_robin:
-                    continue
-                else:
-                    raise HTTPException(status_code=500, detail="Error: Current provider response failed!")
-            except Exception as e:
+            except (Exception, HTTPException) as e:
                 print('\033[31m')
                 print(f"Error with provider {provider['provider']}: {str(e)}")
-                traceback.print_exc()
+                # traceback.print_exc()
                 print('\033[0m')
                 if use_round_robin:
                     continue
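Taken together, the main.py changes widen provider matching (model keys without a "provider/" prefix now count) and collapse the duplicated exception blocks into one, dropping the traceback from the log. Below is a minimal sketch of the resulting fallback loop, not the project's exact handler: try_providers() and its signature are hypothetical, and process_request (the coroutine already used in the diff) is passed in as a parameter so the sketch stays self-contained.

from fastapi import HTTPException

async def try_providers(request, matching_providers, process_request, use_round_robin=True):
    # Try each matching provider in turn; on failure either move on
    # (round robin) or fail fast with a 500.
    for provider in matching_providers:
        try:
            return await process_request(request, provider)
        except (Exception, HTTPException) as e:
            # One except clause replaces the separate HTTPException/Exception
            # branches; the traceback is no longer printed, so the log stays short.
            print('\033[31m')
            print(f"Error with provider {provider['provider']}: {str(e)}")
            print('\033[0m')
            if use_round_robin:
                continue
            raise HTTPException(status_code=500, detail="Error: Current provider response failed!")
    raise HTTPException(status_code=500, detail="Error: all matching providers failed")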
response.py
CHANGED
@@ -70,7 +70,7 @@ async def fetch_gpt_response_stream(client, url, headers, payload):
     async with client.stream('POST', url, headers=headers, json=payload) as response:
         # print("response.status_code", response.status_code)
         if response.status_code != 200:
-            print("请求失败,状态码是", response.status_code)
+            # print("请求失败,状态码是", response.status_code)
             error_message = await response.aread()
             # error_str = error_message.decode('utf-8', errors='replace')
             # error_json = json.loads(error_str)
@@ -89,13 +89,13 @@ async def fetch_claude_response_stream(client, url, headers, payload, model):
     async with client.stream('POST', url, headers=headers, json=payload) as response:
         if response.status_code != 200:
             print('\033[31m')
-            print(f"请求失败,状态码是{response.status_code},错误信息:")
+            # print(f"请求失败,状态码是{response.status_code},错误信息:")
             error_message = await response.aread()
             error_str = error_message.decode('utf-8', errors='replace')
             error_json = json.loads(error_str)
-            print(json.dumps(error_json, indent=4, ensure_ascii=False))
+            # print(json.dumps(error_json, indent=4, ensure_ascii=False))
             print('\033[0m')
-            yield {"error": f"HTTP Error {response.status_code}", "details": error_json}
+            yield {"error": f"fetch_claude_response_stream HTTP Error {response.status_code}", "details": error_json}
         buffer = ""
         async for chunk in response.aiter_text():
             buffer += chunk
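In response.py the status-code prints are commented out, and failures are instead reported through the yielded error object, now prefixed with the name of the fetcher that produced it. Below is a minimal sketch of that pattern with a hypothetical fetch_example_response_stream(); the early return and the JSONDecodeError fallback are additions of this sketch, not part of the diff, and client is assumed to be an httpx.AsyncClient.

import json
import httpx

async def fetch_example_response_stream(client: httpx.AsyncClient, url, headers, payload):
    async with client.stream('POST', url, headers=headers, json=payload) as response:
        if response.status_code != 200:
            error_message = await response.aread()
            error_str = error_message.decode('utf-8', errors='replace')
            try:
                error_json = json.loads(error_str)
            except json.JSONDecodeError:
                error_json = {"raw": error_str}
            # Name the fetcher in the error so the caller can tell which
            # stream failed, instead of printing to stdout.
            yield {"error": f"fetch_example_response_stream HTTP Error {response.status_code}", "details": error_json}
            return
        async for chunk in response.aiter_text():
            yield chunk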
test/provider_test.py
CHANGED
@@ -14,9 +14,13 @@ def test_client():
 def api_key():
     return os.environ.get("API")
 
-def test_request_model(test_client, api_key):
+@pytest.fixture
+def get_model():
+    return os.environ.get("MODEL", "claude-3-5-sonnet-20240620")
+
+def test_request_model(test_client, api_key, get_model):
     request_data = {
-        "model":
+        "model": get_model,
         "messages": [
             {
                 "role": "user",
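The test change introduces a get_model fixture so the model under test is read from the environment rather than hard-coded. A small standalone sketch of the same pattern, assuming pytest is installed; test_model_fixture() is a hypothetical consumer added only for illustration.

import os
import pytest

@pytest.fixture
def get_model():
    # Read the model name from the environment, falling back to a default.
    return os.environ.get("MODEL", "claude-3-5-sonnet-20240620")

def test_model_fixture(get_model):
    # pytest injects the fixture's return value, so get_model is a plain string here.
    assert isinstance(get_model, str)

With the fixture in place, the suite can be pointed at another model via something like MODEL=<model-name> pytest test/provider_test.py.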