yym68686 committed
Commit 133f622 · 1 Parent(s): 3c06783

Add feature: automatic URL redirection


Fixed bug: Unable to parse response [DONE]

Files changed (3)
  1. main.py +1 -1
  2. response.py +47 -15
  3. utils.py +4 -0
main.py CHANGED
@@ -160,7 +160,7 @@ class ModelRequestHandler:
              else:
                  raise HTTPException(status_code=500, detail="Error: Current provider response failed!")

-     raise HTTPException(status_code=500, detail="All providers failed")
+     raise HTTPException(status_code=500, detail=f"All providers failed: {request.model}")

  model_handler = ModelRequestHandler()

response.py CHANGED
@@ -76,26 +76,58 @@ async def fetch_gemini_response_stream(client, url, headers, payload, model):
      # except json.JSONDecodeError:
      #     print(f"Unable to parse JSON: {buffer}")

- async def fetch_gpt_response_stream(client, url, headers, payload):
-     async with client.stream('POST', url, headers=headers, json=payload) as response:
-         if response.status_code != 200:
-             error_message = await response.aread()
-             error_str = error_message.decode('utf-8', errors='replace')
-             try:
-                 error_json = json.loads(error_str)
-             except json.JSONDecodeError:
-                 error_json = error_str
-             yield {"error": f"fetch_gpt_response_stream HTTP Error {response.status_code}", "details": error_json}
-         buffer = ""
-         async for chunk in response.aiter_text():
-             # logger.info(f"chunk: {repr(chunk)}")
-             buffer += chunk
+ async def fetch_gpt_response_stream(client, url, headers, payload, max_redirects=5):
+     redirect_count = 0
+     while redirect_count < max_redirects:
+         # logger.info(f"fetch_gpt_response_stream: {url}")
+         async with client.stream('POST', url, headers=headers, json=payload) as response:
+             if response.status_code != 200:
+                 error_message = await response.aread()
+                 error_str = error_message.decode('utf-8', errors='replace')
+                 try:
+                     error_json = json.loads(error_str)
+                 except json.JSONDecodeError:
+                     error_json = error_str
+                 yield {"error": f"fetch_gpt_response_stream HTTP Error {response.status_code}", "details": error_json}
+                 return
+
+             # Check whether the response contains a redirect script
+             content = await response.aread()
+             content_str = content.decode('utf-8', errors='replace')
+             # logger.info(f"chunk: {repr(content_str)}")
+             import re
+             redirect_match = re.search(r"window\.location\.href\s*=\s*'([^']+)'", content_str)
+             if redirect_match:
+                 new_url = redirect_match.group(1)
+                 # logger.info(f"new_url: {new_url}")
+                 if not new_url.startswith('http'):
+                     # If it is a relative path, construct the full URL
+                     # logger.info(url.split('/'))
+                     base_url = '/'.join(url.split('/')[:3])  # extract the scheme and host
+                     new_url = base_url + new_url
+                 url = new_url
+                 # logger.info(f"new_url: {new_url}")
+                 redirect_count += 1
+                 continue
+
+             buffer = content_str
              while "\n" in buffer:
                  line, buffer = buffer.split("\n", 1)
-                 # print("line", repr(line))
                  if line and line != "data: " and line != "data:" and not line.startswith(": "):
                      yield line + "\n"

+             async for chunk in response.aiter_text():
+                 # logger.info(f"chunk: {repr(chunk)}")
+                 buffer += chunk
+                 while "\n" in buffer:
+                     line, buffer = buffer.split("\n", 1)
+                     # logger.info("line: %s", repr(line))
+                     if line and line != "data: " and line != "data:" and not line.startswith(": "):
+                         yield line + "\n"
+             return
+
+     yield {"error": "Too many redirects", "details": f"Reached maximum of {max_redirects} redirects"}
+
  async def fetch_claude_response_stream(client, url, headers, payload, model):
      timestamp = datetime.timestamp(datetime.now())
      async with client.stream('POST', url, headers=headers, json=payload) as response:
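
For context on the change above: some providers answer the initial POST with a small HTML page containing a JavaScript redirect rather than an SSE stream, and the new loop follows that redirect up to max_redirects times before giving up. A minimal, standalone sketch of just the extraction step (the helper name and sample HTML below are illustrative, not part of the repository):

    import re

    def extract_redirect_url(content_str, current_url):
        # Illustrative helper: find a window.location.href redirect and resolve relative paths
        match = re.search(r"window\.location\.href\s*=\s*'([^']+)'", content_str)
        if not match:
            return None
        new_url = match.group(1)
        if not new_url.startswith('http'):
            # Relative target: prepend the scheme and host of the current URL
            base_url = '/'.join(current_url.split('/')[:3])
            new_url = base_url + new_url
        return new_url

    # Hypothetical redirect page a provider might return
    html = "<script>window.location.href = '/v1/chat/completions';</script>"
    print(extract_redirect_url(html, "https://gateway.example.com/old/path"))
    # -> https://gateway.example.com/v1/chat/completions

Because the redirected request is retried inside the same while loop, a page that keeps redirecting ends with the "Too many redirects" error instead of looping forever.
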
utils.py CHANGED
@@ -89,10 +89,14 @@ async def error_handling_wrapper(generator, status_code=200):
              first_item_str = first_item_str[6:]
          elif first_item_str.startswith("data:"):
              first_item_str = first_item_str[5:]
+         if first_item_str == "[DONE]":
+             logger.error("error_handling_wrapper [DONE]!")
+             raise StopAsyncIteration
          try:
              first_item_str = json.loads(first_item_str)
          except json.JSONDecodeError:
              logger.error("error_handling_wrapper JSONDecodeError!" + repr(first_item_str))
+             raise StopAsyncIteration
          if isinstance(first_item_str, dict) and 'error' in first_item_str:
              # If the first yielded item is an error message, raise an HTTPException
              raise HTTPException(status_code=status_code, detail=f"{first_item_str}"[:300])
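
The utils.py change addresses the "[DONE]" bug from the commit message: OpenAI-compatible streams terminate with a literal "data: [DONE]" line, which is not JSON, so peeking at it with json.loads previously fell into the JSONDecodeError branch. A minimal sketch of the failure mode and the new guard (simplified, not the actual wrapper body):

    import json

    first_item_str = "data: [DONE]"

    # Strip the SSE prefix, as the wrapper does
    if first_item_str.startswith("data: "):
        first_item_str = first_item_str[6:]

    if first_item_str == "[DONE]":
        # New behaviour: treat the terminator as end-of-stream
        print("stream finished")  # the real code raises StopAsyncIteration
    else:
        # The old code reached this unconditionally, and json.loads("[DONE]") fails
        parsed = json.loads(first_item_str)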