# NOTE: web-viewer residue removed — the following metadata was scraped along
# with the file and is preserved here as a comment so the module stays valid
# Python: committed by github-actions[bot], GitHub deploy
# 64dce4ac99f841113dc089836cc3b19b04e20d44, blob 2ed0f6b, 7.21 kB.
import asyncio
import logging
import sys
import time

import socketio

from open_webui.models.users import Users
# Merged the two previously duplicated `from open_webui.env import` statements
# into a single import; all imported names are unchanged.
from open_webui.env import (
    ENABLE_WEBSOCKET_SUPPORT,
    GLOBAL_LOG_LEVEL,
    SRC_LOG_LEVELS,
    WEBSOCKET_MANAGER,
    WEBSOCKET_REDIS_URL,
)
from open_webui.utils.auth import decode_token
from open_webui.socket.utils import RedisDict, RedisLock

# Route all log output to stdout; this module's verbosity follows the
# project's per-subsystem "SOCKET" log level.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["SOCKET"])
# Shared AsyncServer configuration (previously duplicated verbatim in both
# branches): websocket transport is only offered when explicitly enabled,
# otherwise clients fall back to long-polling.
_server_kwargs = dict(
    cors_allowed_origins=[],
    async_mode="asgi",
    transports=(["websocket"] if ENABLE_WEBSOCKET_SUPPORT else ["polling"]),
    allow_upgrades=ENABLE_WEBSOCKET_SUPPORT,
    always_connect=True,
)
if WEBSOCKET_MANAGER == "redis":
    # Redis-backed client manager lets multiple app instances share
    # Socket.IO state. `mgr` stays a module-level name for compatibility.
    mgr = socketio.AsyncRedisManager(WEBSOCKET_REDIS_URL)
    _server_kwargs["client_manager"] = mgr
sio = socketio.AsyncServer(**_server_kwargs)
# Timeout duration in seconds
TIMEOUT_DURATION = 3
# Dictionary to maintain the user pool
if WEBSOCKET_MANAGER == "redis":
    # Redis-backed dicts so session/user/usage state is shared across
    # multiple app instances behind a load balancer.
    log.debug("Using Redis to manage websockets.")
    SESSION_POOL = RedisDict("open-webui:session_pool", redis_url=WEBSOCKET_REDIS_URL)
    USER_POOL = RedisDict("open-webui:user_pool", redis_url=WEBSOCKET_REDIS_URL)
    USAGE_POOL = RedisDict("open-webui:usage_pool", redis_url=WEBSOCKET_REDIS_URL)
    # Cross-instance lock so only one instance runs the usage-pool cleanup
    # loop; the lock expires after twice the cleanup interval so a crashed
    # holder does not block cleanup forever.
    clean_up_lock = RedisLock(
        redis_url=WEBSOCKET_REDIS_URL,
        lock_name="usage_cleanup_lock",
        timeout_secs=TIMEOUT_DURATION * 2,
    )
    # NOTE(review): "aquire" is a typo for "acquire", but it mirrors the
    # RedisLock API (`aquire_lock`) and these names may be imported by other
    # modules — fix project-wide, not here.
    aquire_func = clean_up_lock.aquire_lock
    renew_func = clean_up_lock.renew_lock
    release_func = clean_up_lock.release_lock
else:
    # Single-instance mode: plain dicts, and no-op lock functions that
    # always "succeed" so the cleanup loop runs unconditionally.
    SESSION_POOL = {}
    USER_POOL = {}
    USAGE_POOL = {}
    aquire_func = release_func = renew_func = lambda: True
async def periodic_usage_pool_cleanup():
    """Periodically evict stale sessions from USAGE_POOL.

    Runs forever (one pass every TIMEOUT_DURATION seconds) once the
    cross-instance cleanup lock is acquired; if the lock is already held
    elsewhere the coroutine returns immediately. The lock is released in
    all exit paths.
    """
    if not aquire_func():
        log.debug("Usage pool cleanup lock already exists. Not running it.")
        return

    log.debug("Running periodic_usage_pool_cleanup")
    try:
        while True:
            # If the lock cannot be renewed, another instance may take over;
            # stop rather than keep running without holding the lock.
            if not renew_func():
                # (was an f-string with no placeholder)
                log.error("Unable to renew cleanup lock. Exiting usage pool cleanup.")
                raise Exception("Unable to renew usage pool cleanup lock.")

            now = int(time.time())
            send_usage = False
            for model_id, connections in list(USAGE_POOL.items()):
                # Sessions whose last heartbeat is older than the timeout.
                expired_sids = [
                    sid
                    for sid, details in connections.items()
                    if now - details["updated_at"] > TIMEOUT_DURATION
                ]
                for sid in expired_sids:
                    del connections[sid]

                if not connections:
                    log.debug(f"Cleaning up model {model_id} from usage pool")
                    del USAGE_POOL[model_id]
                else:
                    # Reassign so a Redis-backed dict persists the mutation.
                    USAGE_POOL[model_id] = connections
                # NOTE(review): set for every model scanned, not only when a
                # session actually expired — so "usage" is re-emitted each
                # cycle while any model is in use. Confirm this is intended.
                send_usage = True

            if send_usage:
                # Broadcast the refreshed in-use model list to all clients.
                await sio.emit("usage", {"models": get_models_in_use()})

            await asyncio.sleep(TIMEOUT_DURATION)
    finally:
        release_func()
# Expose the Socket.IO server as an ASGI application, mounted by the main
# web app under the /ws/socket.io path.
app = socketio.ASGIApp(
    sio,
    socketio_path="/ws/socket.io",
)
def get_models_in_use():
    """Return the ids of all models that currently have active usage entries."""
    return list(USAGE_POOL.keys())
@sio.on("usage")
async def usage(sid, data):
    """Record a usage heartbeat for ``data["model"]`` from session ``sid``."""
    model_id = data["model"]
    # Timestamp of this heartbeat, used by the periodic cleanup to expire us.
    timestamp = int(time.time())

    # Merge this session into the model's connection map; full reassignment
    # (rather than in-place update) keeps a Redis-backed dict in sync.
    existing = USAGE_POOL[model_id] if model_id in USAGE_POOL else {}
    USAGE_POOL[model_id] = {**existing, sid: {"updated_at": timestamp}}

    # Tell every client which models are currently busy.
    await sio.emit("usage", {"models": get_models_in_use()})
@sio.event
async def connect(sid, environ, auth):
    """Handle a new socket connection, registering it if a valid token is given.

    Unauthenticated connections are still accepted (always_connect=True) but
    are not added to the pools. On successful auth the session is recorded in
    SESSION_POOL/USER_POOL and the user count and model-usage snapshot are
    broadcast to all clients.
    """
    user = None
    if auth and "token" in auth:
        data = decode_token(auth["token"])
        if data is not None and "id" in data:
            user = Users.get_user_by_id(data["id"])

        if user:
            SESSION_POOL[sid] = user.id
            # Reassignment (not .append) so a Redis-backed dict persists it.
            if user.id in USER_POOL:
                USER_POOL[user.id] = USER_POOL[user.id] + [sid]
            else:
                USER_POOL[user.id] = [sid]

            # len(USER_POOL) == number of distinct connected users; was
            # len(USER_POOL.items()), which built a throwaway view object.
            await sio.emit("user-count", {"count": len(USER_POOL)})
            await sio.emit("usage", {"models": get_models_in_use()})
@sio.on("user-join")
async def user_join(sid, data):
    """Register an already-connected session once the client supplies a token.

    Silently returns on a missing/invalid token or unknown user; otherwise
    records the session and broadcasts the updated user count.
    """
    auth = data.get("auth")
    if not auth or "token" not in auth:
        return

    token_data = decode_token(auth["token"])
    if token_data is None or "id" not in token_data:
        return

    user = Users.get_user_by_id(token_data["id"])
    if not user:
        return

    SESSION_POOL[sid] = user.id
    # Reassignment (not .append) so a Redis-backed dict persists it.
    if user.id in USER_POOL:
        USER_POOL[user.id] = USER_POOL[user.id] + [sid]
    else:
        USER_POOL[user.id] = [sid]

    # len(USER_POOL) == distinct connected users (was len(USER_POOL.items())).
    await sio.emit("user-count", {"count": len(USER_POOL)})
@sio.on("user-count")
async def user_count(sid):
    """Broadcast the number of distinct users currently connected."""
    # len(USER_POOL) is equivalent to len(USER_POOL.items()) without
    # materializing the items view.
    await sio.emit("user-count", {"count": len(USER_POOL)})
@sio.on("chat")
async def chat(sid, data):
    """Debug hook: log incoming chat events with the sender's user id."""
    # Use the module logger (consistent with the rest of the file) instead of
    # a bare print(); .get avoids a KeyError for sessions that never
    # authenticated and so are absent from SESSION_POOL.
    log.debug("chat %s %s %s", sid, SESSION_POOL.get(sid), data)
@sio.event
async def disconnect(sid):
    """Deregister a closing session and broadcast the updated user count."""
    if sid not in SESSION_POOL:
        # Session was never authenticated/registered; nothing to clean up.
        return

    user_id = SESSION_POOL[sid]
    del SESSION_POOL[sid]

    # Drop this sid from the user's session list; remove the user entirely
    # when no sessions remain. .get/.pop guard against pools that are out of
    # sync (the original raised KeyError if user_id was missing).
    remaining = [s for s in USER_POOL.get(user_id, []) if s != sid]
    if remaining:
        USER_POOL[user_id] = remaining
    else:
        USER_POOL.pop(user_id, None)

    await sio.emit("user-count", {"count": len(USER_POOL)})
def get_event_emitter(request_info):
    """Build an async emitter that pushes "chat-events" to every session of
    the requesting user, plus the originating session itself."""

    async def __event_emitter__(event_data):
        user_id = request_info["user_id"]
        # Use a set so the originating session is not emitted to twice when
        # it already appears in the user's pool.
        targets = set(USER_POOL.get(user_id, []))
        targets.add(request_info["session_id"])

        payload = {
            "chat_id": request_info["chat_id"],
            "message_id": request_info["message_id"],
            "data": event_data,
        }
        for target in targets:
            await sio.emit("chat-events", payload, to=target)

    return __event_emitter__
def get_event_call(request_info):
    """Build an async caller that sends a "chat-events" message to the
    originating session and waits for the client's acknowledgement."""

    async def __event_call__(event_data):
        return await sio.call(
            "chat-events",
            {
                "chat_id": request_info["chat_id"],
                "message_id": request_info["message_id"],
                "data": event_data,
            },
            to=request_info["session_id"],
        )

    return __event_call__