sanbo
committed on
Commit
·
6d01f32
1
Parent(s):
dfbbd12
update sth. at 2025-01-05 15:24:55
Browse files- more_core.py +25 -7
- some_base_method/degpt25.1.5.py +86 -8
more_core.py
CHANGED
@@ -3,10 +3,12 @@ import time
|
|
3 |
import multiprocessing
|
4 |
from typing import Dict, Any, List
|
5 |
from fastapi import FastAPI, Request, Response, HTTPException
|
|
|
6 |
import uvicorn
|
7 |
import tiktoken
|
8 |
from json.decoder import JSONDecodeError
|
9 |
import random
|
|
|
10 |
import string
|
11 |
from apscheduler.schedulers.background import BackgroundScheduler
|
12 |
|
@@ -32,14 +34,21 @@ class APIServer:
|
|
32 |
|
33 |
def _setup_routes(self) -> None:
|
34 |
"""Initialize API routes"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
routes = self._get_routes()
|
36 |
for path in routes:
|
37 |
self._register_route(path)
|
38 |
|
39 |
-
@self.app.get("/")
|
40 |
-
async def root() -> str:
|
41 |
-
return "你好"
|
42 |
-
|
43 |
def _get_routes(self) -> List[str]:
|
44 |
"""Get configured API routes"""
|
45 |
default_path = "/v1/chat/completions"
|
@@ -97,15 +106,22 @@ class APIServer:
|
|
97 |
def is_chatgpt_format(self, data):
|
98 |
"""Check if the data is in the expected ChatGPT format"""
|
99 |
try:
|
100 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
101 |
if isinstance(data, dict):
|
102 |
# Ensure 'choices' is a list and the first item has a 'message' field
|
103 |
if "choices" in data and isinstance(data["choices"], list) and len(data["choices"]) > 0:
|
104 |
if "message" in data["choices"][0]:
|
105 |
return True
|
106 |
except Exception as e:
|
107 |
-
|
108 |
-
|
109 |
return False
|
110 |
|
111 |
async def _generate_response(self, headers: Dict[str, str], data: Dict[str, Any]) -> Dict[str, Any]:
|
@@ -203,6 +219,7 @@ class APIServer:
|
|
203 |
def _schedule_route_check(self) -> None:
|
204 |
"""Schedule the task to check and reload routes every 30 seconds"""
|
205 |
self.scheduler.add_job(self._reload_routes_if_needed, 'interval', seconds=30)
|
|
|
206 |
|
207 |
def _reload_routes_if_needed(self) -> None:
|
208 |
"""Check if routes need to be reloaded based on environment variables"""
|
@@ -219,6 +236,7 @@ class APIServer:
|
|
219 |
print("Routes changed, reloading...")
|
220 |
self._reload_routes(new_routes)
|
221 |
|
|
|
222 |
def _reload_routes(self, new_routes: List[str]) -> None:
|
223 |
"""Reload the routes based on the updated configuration"""
|
224 |
# Clear existing routes
|
|
|
3 |
import multiprocessing
|
4 |
from typing import Dict, Any, List
|
5 |
from fastapi import FastAPI, Request, Response, HTTPException
|
6 |
+
from fastapi.responses import RedirectResponse
|
7 |
import uvicorn
|
8 |
import tiktoken
|
9 |
from json.decoder import JSONDecodeError
|
10 |
import random
|
11 |
+
import json
|
12 |
import string
|
13 |
from apscheduler.schedulers.background import BackgroundScheduler
|
14 |
|
|
|
34 |
|
35 |
def _setup_routes(self) -> None:
    """Register the static pages plus every configured API endpoint."""

    # Landing page: forward visitors to the /web page.
    @self.app.get("/")
    async def root() -> RedirectResponse:
        return RedirectResponse(url="/web")

    # Plain-text /web page.
    @self.app.get("/web")
    async def web() -> str:
        return "hello. It's web page."

    # Dynamically configured chat-completion routes.
    for path in self._get_routes():
        self._register_route(path)
|
51 |
|
|
|
|
|
|
|
|
|
52 |
def _get_routes(self) -> List[str]:
|
53 |
"""Get configured API routes"""
|
54 |
default_path = "/v1/chat/completions"
|
|
|
106 |
def is_chatgpt_format(self, data):
    """Return True when *data* looks like a ChatGPT-style completion payload.

    Accepts either a dict or a JSON string; a string is decoded first.
    The structural requirement is a non-empty ``choices`` list whose first
    entry carries a ``message`` field. Anything else yields False.
    """
    try:
        # Strings must decode to JSON before the structural check.
        if isinstance(data, str):
            try:
                data = json.loads(data)
            except json.JSONDecodeError:
                # Unparseable text cannot be a ChatGPT payload.
                return False

        if isinstance(data, dict):
            choices = data.get("choices")
            # Non-empty list whose first element holds a 'message' key.
            if isinstance(choices, list) and choices and "message" in choices[0]:
                return True
    except Exception as e:
        print(f"Error checking ChatGPT format: {e}")
    return False
|
126 |
|
127 |
async def _generate_response(self, headers: Dict[str, str], data: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
219 |
def _schedule_route_check(self) -> None:
    """Schedule the background job that re-checks route configuration.

    Registers ``_reload_routes_if_needed`` with the scheduler to run every
    30 seconds so route changes (driven by environment variables) are
    picked up without restarting the server.

    Fix: removed the dead ``pass`` statement that followed the
    ``add_job`` call — it was unreachable-by-purpose filler code.
    """
    self.scheduler.add_job(self._reload_routes_if_needed, 'interval', seconds=30)
|
223 |
|
224 |
def _reload_routes_if_needed(self) -> None:
|
225 |
"""Check if routes need to be reloaded based on environment variables"""
|
|
|
236 |
print("Routes changed, reloading...")
|
237 |
self._reload_routes(new_routes)
|
238 |
|
239 |
+
|
240 |
def _reload_routes(self, new_routes: List[str]) -> None:
|
241 |
"""Reload the routes based on the updated configuration"""
|
242 |
# Clear existing routes
|
some_base_method/degpt25.1.5.py
CHANGED
@@ -15,8 +15,27 @@ def get_models():
|
|
15 |
return json.dumps(models)
|
16 |
|
17 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
18 |
# 新的方法,检查model是否有效并返回id
|
19 |
-
def
|
20 |
# 获取所有模型数据
|
21 |
models_data = json.loads(get_models())["data"]
|
22 |
|
@@ -34,11 +53,31 @@ def get_model_by_id(model_id=None):
|
|
34 |
return model_data["id"] if model_data else None
|
35 |
|
36 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
37 |
def chat_completion_messages(
|
38 |
-
messages,
|
|
|
|
|
|
|
39 |
project="DecentralGPT", stream=False, temperature=0.3, max_tokens=1024, top_p=0.5,
|
40 |
frequency_penalty=0, presence_penalty=0):
|
41 |
-
"""处理用户请求并保留上下文"""
|
42 |
url = 'https://usa-chat.degpt.ai/api/v0/chat/completion/proxy'
|
43 |
headers = {
|
44 |
'sec-ch-ua-platform': '"macOS"',
|
@@ -50,17 +89,27 @@ def chat_completion_messages(
|
|
50 |
'sec-ch-ua-mobile': '?0'
|
51 |
}
|
52 |
payload = {
|
53 |
-
"model":
|
54 |
"messages": messages,
|
55 |
"project": project,
|
56 |
-
"stream": stream
|
|
|
|
|
|
|
|
|
|
|
|
|
57 |
}
|
|
|
58 |
|
|
|
|
|
|
|
59 |
try:
|
60 |
response = requests.post(url, headers=headers, json=payload)
|
61 |
response.encoding = 'utf-8'
|
62 |
response.raise_for_status()
|
63 |
-
return response.
|
64 |
except requests.exceptions.RequestException as e:
|
65 |
print(f"请求失败: {e}")
|
66 |
return "请求失败,请检查网络或参数配置。"
|
@@ -70,9 +119,33 @@ def chat_completion_messages(
|
|
70 |
return {}
|
71 |
|
72 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
73 |
if __name__ == '__main__':
|
74 |
-
print(
|
75 |
-
print(
|
|
|
|
|
76 |
print(get_models())
|
77 |
messages = [
|
78 |
{'role': 'user', 'content': '你好,你是谁?'},
|
@@ -86,6 +159,11 @@ if __name__ == '__main__':
|
|
86 |
model="Qwen2.5-72B"
|
87 |
)
|
88 |
print(response_content)
|
|
|
|
|
|
|
|
|
|
|
89 |
# # support Chinese
|
90 |
# if isinstance(response_content, str): # 如果已经是 JSON 字符串
|
91 |
# return Response(response_content, content_type="application/json; charset=utf-8")
|
|
|
15 |
return json.dumps(models)
|
16 |
|
17 |
|
18 |
+
def is_model_available(model_id):
    """Return True if *model_id* appears in the catalogue from get_models().

    ``get_models()`` returns a JSON string whose ``data`` key holds the model
    list. Fix: the original indexed ``model["id"]`` directly, which raises
    ``KeyError`` for any catalogue entry lacking an ``id`` field; ``.get`` now
    skips malformed entries instead of crashing the availability check.
    """
    # Parse the JSON catalogue once.
    models_data = json.loads(get_models())
    # Membership test over the advertised model ids.
    return any(model.get("id") == model_id for model in models_data.get("data", []))
|
31 |
+
|
32 |
+
|
33 |
+
def get_auto_model(model_id=None):
    """Return the model id to use for a request.

    NOTE(review): currently a placeholder — *model_id* is ignored and the
    hard-coded default is always returned, so callers (e.g. the payload
    built in chat_completion_messages) cannot actually select another
    model. Confirm whether this should fall back to the default only when
    *model_id* is missing or unavailable (see is_model_available).
    """
    return "Qwen2.5-72B"
|
35 |
+
|
36 |
+
|
37 |
# 新的方法,检查model是否有效并返回id
|
38 |
+
def get_model_by_autoupdate(model_id=None):
|
39 |
# 获取所有模型数据
|
40 |
models_data = json.loads(get_models())["data"]
|
41 |
|
|
|
53 |
return model_data["id"] if model_data else None
|
54 |
|
55 |
|
56 |
+
def chat_completion_message(
        user_prompt,
        user_id: str = None,
        session_id: str = None,
        system_prompt="You are a helpful assistant.",
        model="Qwen2.5-72B",
        project="DecentralGPT", stream=False,
        temperature=0.3, max_tokens=1024, top_p=0.5,
        frequency_penalty=0, presence_penalty=0):
    """Single-user, single-turn conversation; session isolation to be added later.

    Wraps *user_prompt* (plus *system_prompt*) into a messages list and
    delegates to ``chat_completion_messages``.

    BUG FIX: the original forwarded ``user_id, session_id, model``
    positionally into ``chat_completion_messages(messages, model, user_id,
    session_id, ...)``, so ``user_id`` landed in ``model``, ``session_id``
    in ``user_id`` and ``model`` in ``session_id``. Keyword arguments now
    route every value to the right parameter.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    return chat_completion_messages(
        messages,
        model=model,
        user_id=user_id,
        session_id=session_id,
        project=project,
        stream=stream,
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        frequency_penalty=frequency_penalty,
        presence_penalty=presence_penalty,
    )
|
72 |
+
|
73 |
+
|
74 |
def chat_completion_messages(
|
75 |
+
messages,
|
76 |
+
model="Qwen2.5-72B",
|
77 |
+
user_id: str = None,
|
78 |
+
session_id: str = None,
|
79 |
project="DecentralGPT", stream=False, temperature=0.3, max_tokens=1024, top_p=0.5,
|
80 |
frequency_penalty=0, presence_penalty=0):
|
|
|
81 |
url = 'https://usa-chat.degpt.ai/api/v0/chat/completion/proxy'
|
82 |
headers = {
|
83 |
'sec-ch-ua-platform': '"macOS"',
|
|
|
89 |
'sec-ch-ua-mobile': '?0'
|
90 |
}
|
91 |
payload = {
|
92 |
+
"model": get_auto_model(model),
|
93 |
"messages": messages,
|
94 |
"project": project,
|
95 |
+
"stream": stream,
|
96 |
+
"temperature": temperature,
|
97 |
+
"max_tokens": max_tokens,
|
98 |
+
"top_p": top_p,
|
99 |
+
"frequency_penalty": frequency_penalty,
|
100 |
+
"presence_penalty": presence_penalty
|
101 |
+
|
102 |
}
|
103 |
+
return chat_completion(url, headers, payload)
|
104 |
|
105 |
+
|
106 |
+
def chat_completion(url, headers, payload):
|
107 |
+
"""处理用户请求并保留上下文"""
|
108 |
try:
|
109 |
response = requests.post(url, headers=headers, json=payload)
|
110 |
response.encoding = 'utf-8'
|
111 |
response.raise_for_status()
|
112 |
+
return response.json()
|
113 |
except requests.exceptions.RequestException as e:
|
114 |
print(f"请求失败: {e}")
|
115 |
return "请求失败,请检查网络或参数配置。"
|
|
|
119 |
return {}
|
120 |
|
121 |
|
122 |
+
def is_chatgpt_format(data):
    """Return True when *data* has the shape of a ChatGPT completion response.

    A JSON string is decoded first; the check then requires a dict with a
    non-empty ``choices`` list whose first entry contains a ``message`` key.
    Every other input — including undecodable strings — yields False.
    """
    try:
        # Decode string payloads before inspecting their structure.
        if isinstance(data, str):
            try:
                data = json.loads(data)
            except json.JSONDecodeError:
                # Not valid JSON, so not a ChatGPT payload.
                return False

        if isinstance(data, dict):
            choices = data.get("choices")
            # First choice must exist and expose a 'message' field.
            if isinstance(choices, list) and choices and "message" in choices[0]:
                return True
    except Exception as e:
        print(f"Error checking ChatGPT format: {e}")
    return False
|
142 |
+
|
143 |
+
|
144 |
if __name__ == '__main__':
|
145 |
+
print(get_auto_model("Qwen2.5-72B"))
|
146 |
+
print(get_auto_model("Qwen"))
|
147 |
+
print(is_model_available("Qwen2.5-72B"))
|
148 |
+
print(is_model_available("Qwen"))
|
149 |
print(get_models())
|
150 |
messages = [
|
151 |
{'role': 'user', 'content': '你好,你是谁?'},
|
|
|
159 |
model="Qwen2.5-72B"
|
160 |
)
|
161 |
print(response_content)
|
162 |
+
print(f"is chatgpt format: {is_chatgpt_format(response_content)}")
|
163 |
+
|
164 |
+
resp = chat_completion_message("你是什么模型?")
|
165 |
+
print(resp)
|
166 |
+
print(f"is chatgpt format: {is_chatgpt_format(resp)}")
|
167 |
# # support Chinese
|
168 |
# if isinstance(response_content, str): # 如果已经是 JSON 字符串
|
169 |
# return Response(response_content, content_type="application/json; charset=utf-8")
|