# MirraAI / app.py -- deployment script for AUTOMATIC1111 Stable Diffusion WebUI
# (non-code page chrome from the file host -- author/commit/size lines -- commented out)

# Configuration
import os
import re
# Root directory of the deployment environment.
install_path = '/home/xlab-app-center'
# Upstream WebUI repository spec (repo + pinned branch).
webui_repo = 'AUTOMATIC1111/stable-diffusion-webui --branch v1.9.4'
# Local directory name the WebUI checkout lives under.
rename_repo = 'stable-diffusion-webui'
# Port the Gradio UI listens on.
webui_port = 7860
# SECURITY NOTE(review): hard-coded credentials committed to source; only used
# by the commented-out --api-auth flag below, but should come from an env var.
api_auth = 'Echoflare:Tt25faj8'
# aria2c invocation used for all model/asset downloads (16 connections, 1M chunks).
download_tool = 'aria2c --console-log-level=error -c -x 16 -s 16 -k 1M'
# Command-line flags passed to launch.py.
webui_args = [
'--api',
'--xformers',
'--no-hashing',
'--disable-nan-check',
'--disable-console-progressbars',
'--enable-console-prompts',
#'--no-gradio-queue',
'--no-half-vae',
"--skip-torch-cuda-test",
"--allow-code"
# f'--api-auth={api_auth}',
# '--freeze-settings',
]
# Extension repositories to clone into the WebUI's extensions directory.
extensions = [
'https://gitcode.com/zanllp/sd-webui-infinite-image-browsing',
'https://gitcode.com/dtlnor/stable-diffusion-webui-localization-zh_CN', # Chinese (zh_CN) localization
'https://gitcode.com/DominikDoom/a1111-sd-webui-tagcomplete', # prompt autocompletion
'https://gitcode.com/Mikubill/sd-webui-controlnet', # ControlNet
"https://gitcode.net/overbill1683/stable-diffusion-webui-localization-zh_Hans",
"https://gitcode.net/ranting8323/adetailer",
"https://gitcode.net/ranting8323/sd-webui-inpaint-anything",
"https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
"https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
'https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-localization-zh_CN',
'https://openi.pcl.ac.cn/2575044704/sd-webui-lora-block-weight',
'https://openi.pcl.ac.cn/2575044704/sd-skin-extension',
"https://kkgithub.com/thygate/stable-diffusion-webui-depthmap-script.git",
"https://kkgithub.com/continue-revolution/sd-webui-animatediff.git",
"https://kkgithub.com/Iyashinouta/sd-model-downloader.git",
"https://kkgithub.com/fkunn1326/openpose-editor.git",
"https://kkgithub.com/zero01101/openOutpaint-webUI-extension.git",
"https://kkgithub.com/LonicaMewinsky/gif2gif.git",
"https://openi.pcl.ac.cn/2575044704/sd-webui-agent-scheduler",
"https://openi.pcl.ac.cn/2575044704/sd-webui-depth-lib",
"https://openi.pcl.ac.cn/Echoflare/letest"
]
# Checkpoint URLs (downloaded into models/Stable-diffusion); empty by default.
sd_models = [
]
# LoRA URLs (downloaded into models/Lora).
lora_models = [
"https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/ba.safetensors",
"https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/racaco2.safetensors",
]
# VAE URLs (downloaded into models/VAE).
vae_models = [
"https://hf-mirror.com/datasets/VASVASVAS/vae/resolve/main/pastel-waifu-diffusion.vae.pt"
]
# Toggle for downloading ControlNet models.
# NOTE(review): the list `controlnet_models` referenced when this is True is
# not defined anywhere in this file -- enabling it would raise NameError.
ControlNet = False
embedding_models = [
]
hypernetwork_models = []
esrgan_models = []
# Shell commands run from install_path after all downloads, before launch:
# replaces the stock config/ui-config/ui_settings with custom versions.
custom_commands = [
f'rm -rf {install_path}/{rename_repo}/config.json',
f'rm -rf {install_path}/{rename_repo}/ui-config.json',
f'rm -rf {install_path}/{rename_repo}/modules/ui_settings.py',
f"{download_tool} https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json -d {install_path}/{rename_repo} -o config.json --allow-overwrite=true",
f"{download_tool} https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub3.json -d {install_path}/{rename_repo} -o ui-config.json --allow-overwrite=true",
f"{download_tool} https://hf-mirror.com/datasets/Mira-LeafTown/sd-webui-openxlab/raw/main/ui_settings.py -d {install_path}/{rename_repo}/modules -o ui_settings.py --allow-overwrite=true",
]
# Web UI deployment
def monitor_gpu():
    """Thread target intended to monitor GPU usage.

    NOTE(review): the visible body only performed imports and then returned,
    so this is effectively a no-op placeholder -- the actual monitoring logic
    appears to have been removed. The third-party imports are now guarded so
    the monitor thread never crashes when wandb/pynvml are not installed.
    """
    try:
        import wandb  # noqa: F401 -- optional experiment tracker
        import pynvml as nvidia_smi  # noqa: F401 -- optional NVML bindings
    except ImportError:
        # Monitoring dependencies unavailable; nothing to do.
        return
def download_extensions(extensions):
    """Clone each extension repository into the WebUI's extensions directory."""
    extensions_dir = f'{install_path}/{rename_repo}/extensions'
    os.chdir(extensions_dir)
    for repo_url in extensions:
        os.system(f'git clone {repo_url}')
def model_download(models, type_w):
    """Download every URL in *models* into the repo subdirectory *type_w*."""
    for model_url in models:
        download_files(model_url, type_w)
def remove_restart():
    """Replace the stock WebUI footer with a custom one fetched from the mirror."""
    footer_dir = "/home/xlab-app-center/stable-diffusion-webui/html"
    fetch_cmd = "rm ./footer.html && wget -O footer.html https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/footer.html"
    os.chdir(footer_dir)
    os.system(fetch_cmd)
def download_files(url, source):
    """Download *url* into ``{install_path}/{rename_repo}/{source}`` via aria2c.

    Two URL forms are supported:
      * ``name@url`` -- download ``url`` and save it as ``name``.
      * plain URL    -- derive the output name from the last path segment.
    ``huggingface.co`` links are rewritten to the ``hf-mirror.com`` mirror.
    """
    if '@' in url and not url.startswith(('http://', 'https://')):
        # "name@url" form: explicit output file name.
        name, url = url.split('@', 1)
        rename = f"-o '{name}'"
        if 'huggingface.co' in url:
            url = url.replace("huggingface.co", "hf-mirror.com")
    else:
        hf_hosts = ('huggingface.co', 'hf-mirror.com', 'huggingface.sukaka.top')
        # BUGFIX: the original tested `('a' or 'b' or 'c') in url`, which
        # evaluates to `'a' in url` and never checked the other hosts.
        if any(host in url for host in hf_hosts):
            url = url.replace("huggingface.co", "hf-mirror.com")
            # BUGFIX: guard the regex -- `.group(1)` on a failed search raised
            # AttributeError before the original's `if match_name` could run.
            match = re.search(r'/([^/?]+)(?:\?download=true)?$', url)
            rename = f"-o '{match.group(1)}'" if match else ''
        else:
            rename = ''
    source_dir = f'{install_path}/{rename_repo}/{source}'
    os.makedirs(source_dir, exist_ok=True)
    os.chdir(source_dir)
    os.system(f"{download_tool} '{url}' {rename}")
def run_webui():
    """Set up the Stable Diffusion WebUI checkout and launch it (blocking).

    Clones the repo if missing, installs extensions and models, prepares
    mirror environment variables, runs the custom setup commands, and finally
    execs launch.py. Used as a thread target by run().
    """
    os.system("pip install nvidia-ml-py3 wandb")
    # Start GPU monitoring on its own thread.
    monitor_thread = threading.Thread(target=monitor_gpu)
    monitor_thread.start()
    os.chdir(install_path)
    if not os.path.exists(f'{install_path}/{rename_repo}'):
        os.system(f"git clone https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-v1.8.0 {install_path}/{rename_repo}")
    remove_restart()
    # Pre-seed the default checkpoint (aria2c -c resumes partial downloads).
    os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/kaggle/input/museum/131-half.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o [萌二次元]131-half.safetensors")
    if not os.path.exists(f'{install_path}/{rename_repo}'):
        # BUGFIX: the original message interpolated an undefined `git_url`
        # (NameError on this path); report the repo that actually failed.
        print(f'在克隆 https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-v1.8.0 时出错')
        run_webui()
        # BUGFIX: stop after the recursive retry instead of falling through
        # and continuing with a missing checkout.
        return
    download_extensions(extensions)
    model_download(sd_models, 'models/Stable-diffusion')
    model_download(lora_models, 'models/Lora')
    model_download(vae_models, 'models/VAE')
    if ControlNet:
        # NOTE(review): `controlnet_models` is not defined in this file;
        # guard the lookup so enabling ControlNet does not raise NameError.
        model_download(globals().get('controlnet_models', []), 'extensions/sd-webui-controlnet/models')
    model_download(hypernetwork_models, 'models/hypernetworks')
    model_download(embedding_models, 'embeddings')
    model_download(esrgan_models, 'models/ESRGAN')
    os.chdir(f"{install_path}/{rename_repo}")
    # Point launch.py's vendored-repo fetches at reachable mirrors.
    package_envs = [
        {"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")},
        {"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")},
        {"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")},
        {"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")},
        {"env": "CLIP_REPO", "url": os.environ.get('CLIP_REPO', "https://kkgithub.com/openai/CLIP")},
    ]
    os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/"
    for i in package_envs:
        os.environ[i["env"]] = i["url"]
    os.chdir(install_path)
    # Custom setup: swap in the pre-baked config/ui-config/ui_settings files.
    for custom_command in custom_commands:
        os.system(custom_command)
    os.chdir(f"{install_path}/{rename_repo}")
    # Blocks until the WebUI process exits.
    os.system(f"python launch.py {' '.join(webui_args)} --port {webui_port}")
# Instance keep-alive
import time
def session_saver():
    """Keep the instance alive by running a GPU matmul every 10 minutes.

    Installs cupy on demand; once running, never returns (infinite loop).
    Returns early only if cupy cannot be installed.
    """
    try:
        import cupy as cp
    except ImportError:
        print("cupy模块未安装,正在安装...")
        try:
            # BUGFIX: `pip.main(...)` is not a supported pip API and was
            # removed from pip's public surface; invoke pip as a subprocess
            # of the current interpreter instead.
            import subprocess
            import sys
            subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'cupy'])
            import cupy as cp
        except Exception:
            print("无法安装模块,请确保已正确安装pip。")
            return
    while True:
        # Burn a little GPU time so the host does not reap an idle instance.
        # (The original's `for _ in range(1)` wrapper was a no-op and removed.)
        matrix_a = cp.random.rand(2000, 2000)
        matrix_b = cp.random.rand(2000, 2000)
        result = cp.dot(matrix_a, matrix_b)
        print("实例保活:", result)
        del matrix_a, matrix_b, result
        cp.cuda.Stream.null.synchronize()
        time.sleep(600)
# Startup
import threading
import subprocess
import time
import threading
def setup_and_run_services():
    """Install JupyterLab/pyngrok, then run an ngrok tunnel plus JupyterLab.

    Blocks until both child processes exit, then prints their captured
    stdout/stderr.
    """
    packages = ["pyngrok", "jupyterlab"]
    # SECURITY NOTE(review): hard-coded ngrok auth token committed to source;
    # it should be supplied via an environment variable or secret store.
    ngrok_token = "2CXyNlT9xGfFoL5ruI6hQV20FNq_7tbmuzS9RtyNTkyEe1J6C"
    http_port = "8083"
    # Install the Python packages. communicate() already blocks until pip
    # finishes, so the original's extra time.sleep(20) was redundant (removed).
    subprocess.Popen(["pip", "install"] + packages, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    # Expose the JupyterLab port through an ngrok HTTP tunnel.
    ngrok_command = ["ngrok", "http", http_port, "--authtoken=" + ngrok_token]
    ngrok_process = subprocess.Popen(ngrok_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Start JupyterLab with open access (no token, any origin, root allowed).
    jupyter_command = [
        "jupyter-lab",
        "--no-browser",
        "--ip=0.0.0.0",
        "--allow-root",
        "--notebook-dir=/",
        "--port=" + http_port,
        "--LabApp.allow_origin=*",
        "--LabApp.token=",
        "--LabApp.base_url=",
    ]
    jupyter_process = subprocess.Popen(jupyter_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Wait for both services to exit and surface their output.
    ngrok_out, ngrok_err = ngrok_process.communicate()
    jupyter_out, jupyter_err = jupyter_process.communicate()
    print("Ngrok Output:", ngrok_out.decode())
    print("Ngrok Error:", ngrok_err.decode())
    print("Jupyter Output:", jupyter_out.decode())
    print("Jupyter Error:", jupyter_err.decode())
def run_in_background():
    """Start setup_and_run_services() on a new thread and return that thread."""
    worker = threading.Thread(target=setup_and_run_services)
    worker.start()
    return worker
def run():
    """Entry point: start background services, then launch the WebUI thread."""
    # JupyterLab + ngrok run on their own thread.
    background_thread = run_in_background()
    print("jupyterlab服务正在后台运行...")
    webui_thread = threading.Thread(target=run_webui)
    # NOTE(review): the keep-alive thread (session_saver) is deliberately
    # left disabled, mirroring the original commented-out lines:
    # saver = threading.Thread(target=session_saver)
    webui_thread.start()
    # saver.start()