|
|
|
|
|
import os |
|
import re |
|
import time |
|
import json |
|
import shutil |
|
import zipfile |
|
import requests |
|
import subprocess |
|
from datetime import timedelta |
|
from subprocess import getoutput |
|
from IPython.utils import capture |
|
from IPython.display import clear_output |
|
from urllib.parse import urlparse, parse_qs |
|
|
|
|
|
|
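# Detect the runtime environment (Google Colab or Kaggle) and choose the working
# directory; free_plan is a rough heuristic that treats <= 20 GB of RAM as the free tier.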
|
def detect_environment(): |
|
free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20) |
|
environments = { |
|
'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"), |
|
'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content") |
|
} |
|
    for env_var, (environment, path) in environments.items():
        if env_var in os.environ:
            return environment, path, free_plan
    raise RuntimeError("Unsupported environment: expected Google Colab or Kaggle.")
|
|
|
env, root_path, free_plan = detect_environment() |
|
webui_path = f"{root_path}/sdw" |
|
|
|
|
|
|
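# Install system and Python dependencies once per session; a flag file records
# that this block has already run so repeated cell executions can skip it.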
|
flag_file = f"{root_path}/libraries_installed.txt" |
|
|
|
if not os.path.exists(flag_file): |
|
print("💿 Установка библиотек, это займет какое-то время:\n") |
|
|
|
install_lib = { |
|
"aria2": "apt -y install aria2", |
|
"localtunnel": "npm install -g localtunnel", |
|
"insightface": "pip install insightface" |
|
} |
|
|
|
additional_libs = { |
|
"Google Colab": { |
|
"xformers": "pip install xformers==0.0.26.post1 --no-deps" |
|
}, |
|
"Kaggle": { |
|
"xformers": "pip install xformers==0.0.26.post1", |
|
|
|
"aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" |
|
} |
|
} |
|
|
|
if env in additional_libs: |
|
install_lib.update(additional_libs[env]) |
|
|
|
|
|
for index, (package, install_cmd) in enumerate(install_lib.items(), start=1): |
|
print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='') |
|
subprocess.run(install_cmd, shell=True, capture_output=True) |
|
|
|
|
|
with capture.capture_output() as cap: |
|
get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}') |
|
get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl') |
|
get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz') |
|
del cap |
|
|
|
clear_output() |
|
|
|
|
|
with open(flag_file, "w") as f: |
|
f.write(">W<'") |
|
|
|
print("🍪 Библиотеки установлены!" + " "*35) |
|
time.sleep(2) |
|
clear_output() |
|
|
|
|
|
|
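# Load the widget settings saved by the previous cell (settings.json) and expose
# each known key as a module-level variable. A minimal file might look like
# (illustrative values): {"model": "none", "vae": "none", "detailed_download": "off"}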
|
def load_settings(path): |
|
if os.path.exists(path): |
|
with open(path, 'r') as file: |
|
return json.load(file) |
|
return {} |
|
|
|
settings = load_settings(f'{root_path}/settings.json') |
|
|
|
VARIABLES = [ |
|
'model', 'model_num', 'inpainting_model', |
|
'vae', 'vae_num', 'latest_webui', 'latest_exstensions', |
|
'change_webui', 'detailed_download', 'controlnet', |
|
'controlnet_num', 'commit_hash', 'huggingface_token', |
|
'ngrok_token', 'zrok_token', 'commandline_arguments', |
|
'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url', |
|
'Extensions_url', 'custom_file_urls' |
|
] |
|
|
|
globals().update({key: settings.get(key) for key in VARIABLES})
|
|
|
|
|
|
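# Remember when the session started; the timestamp is set only on the first run
# of this cell and reused afterwards.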
|
try: |
|
start_colab |
|
except NameError:
|
start_colab = int(time.time())-5 |
|
|
|
|
|
models_dir = f"{webui_path}/models/Stable-diffusion" |
|
vaes_dir = f"{webui_path}/models/VAE" |
|
embeddings_dir = f"{webui_path}/embeddings" |
|
loras_dir = f"{webui_path}/models/Lora" |
|
extensions_dir = f"{webui_path}/extensions" |
|
control_dir = f"{webui_path}/models/ControlNet" |
|
adetailer_dir = f"{webui_path}/models/adetailer" |
|
|
|
|
|
|
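# First launch: download the prebuilt WebUI archive (A1111 or Forge, depending on
# change_webui) with aria2c and unpack it into webui_path.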
|
if not os.path.exists(webui_path): |
|
start_install = int(time.time()) |
|
print("⌚ Распаковка Stable Diffusion..." if change_webui != 'Forge' else "⌚ Распаковка Stable Diffusion (Forge)...", end='') |
|
with capture.capture_output() as cap: |
|
aria2_command = "aria2c --console-log-level=error -c -x 16 -s 16 -k 1M" |
|
url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip" if change_webui != 'Forge' else "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip" |
|
get_ipython().system('{aria2_command} {url} -o repo.zip') |
|
|
|
get_ipython().system('unzip -q -o repo.zip -d {webui_path}') |
|
get_ipython().system('rm -rf repo.zip') |
|
|
|
get_ipython().run_line_magic('cd', '{root_path}') |
|
os.environ["SAFETENSORS_FAST_GPU"]='1' |
|
os.environ["CUDA_MODULE_LOADING"]="LAZY" |
|
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" |
|
os.environ["PYTHONWARNINGS"] = "ignore" |
|
|
|
get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt') |
|
del cap |
|
install_time = timedelta(seconds=time.time()-start_install) |
|
print("\r🚀 Распаковка Завершена! За","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True) |
|
else: |
|
print("🚀 Все распакованно... Пропуск. ⚡") |
|
    with open(f'{webui_path}/static/colabTimer.txt', 'r') as f:
        start_colab = float(f.read())
|
time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0] |
|
print(f"⌚️ Вы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m") |
|
|
|
|
|
|
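# Optionally update the WebUI repository and/or every installed extension via git.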
|
if latest_webui or latest_exstensions: |
|
action = "Обновление WebUI и Расширений" if latest_webui and latest_exstensions else ("Обновление WebUI" if latest_webui else "Обновление Расширений") |
|
print(f"⌚️ {action}...", end='', flush=True) |
|
with capture.capture_output() as cap: |
|
get_ipython().system('git config --global user.email "[email protected]"') |
|
get_ipython().system('git config --global user.name "Your Name"') |
|
|
|
|
|
if latest_webui: |
|
get_ipython().run_line_magic('cd', '{webui_path}') |
|
get_ipython().system('git restore .') |
|
get_ipython().system('git pull -X theirs --rebase --autostash') |
|
|
|
|
|
if latest_exstensions: |
|
get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}') |
|
del cap |
|
print(f"\r✨ {action} Завершено!") |
|
|
|
|
|
|
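# Small fixes applied to bundled extensions (collapse the Umi-AI accordion by default,
# strip the Encrypt-Image notice, patch additional-networks' metadata editor).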
|
anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main" |
|
|
|
with capture.capture_output() as cap: |
|
|
|
get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default") |
|
|
|
|
|
get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui") |
|
|
|
|
|
get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style') |
|
del cap |
|
|
|
|
|
|
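# "Time machine": if a commit hash was provided, hard-reset the WebUI checkout to it.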
|
if commit_hash: |
|
    print('⏳ Activating the time machine...', end="", flush=True)
|
with capture.capture_output() as cap: |
|
get_ipython().run_line_magic('cd', '{webui_path}') |
|
get_ipython().system('git config --global user.email "[email protected]"') |
|
get_ipython().system('git config --global user.name "Your Name"') |
|
get_ipython().system('git reset --hard {commit_hash}') |
|
del cap |
|
print(f"\r⌛️ Машина времени активированна! Текущий коммит: \033[34m{commit_hash}\033[0m") |
|
|
|
|
|
|
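# Catalogs of downloadable assets, keyed by the names shown in the selection widgets:
# model checkpoints, VAEs and ControlNet weights (URL + target file name for each entry).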
|
print("📦 Скачивание моделей и прочего...", end='') |
|
model_list = { |
|
"1.Anime (by XpucT) + INP": [ |
|
{"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"}, |
|
{"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_v2-inpainting.safetensors"} |
|
], |
|
"2.BluMix [Anime] [V7] + INP": [ |
|
{"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_v7-inpainting.safetensors"} |
|
], |
|
"3.Cetus-Mix [Anime] [V4] + INP": [ |
|
{"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"} |
|
], |
|
"4.Counterfeit [Anime] [V3] + INP": [ |
|
{"url": "https://civitai.com/api/download/models/125050", "name": "Counterfeit_V3.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"} |
|
], |
|
"5.CuteColor [Anime] [V3]": [ |
|
{"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"} |
|
], |
|
"6.Dark-Sushi-Mix [Anime]": [ |
|
{"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"} |
|
], |
|
"7.Deliberate [Realism] [V6] + INP": [ |
|
{"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_v6.safetensors"}, |
|
{"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_v6-inpainting.safetensors"} |
|
], |
|
"8.Meina-Mix [Anime] [V11] + INP": [ |
|
{"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"} |
|
], |
|
"9.Mix-Pro [Anime] [V4] + INP": [ |
|
{"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"}, |
|
{"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"} |
|
] |
|
} |
|
|
|
vae_list = { |
|
"1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "vae-ft-mse-840000-ema-pruned.vae.safetensors"}], |
|
"2.Anything.vae": [{"url": "https://civitai.com/api/download/models/119279", "name": "Anything.vae.safetensors"}], |
|
"3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/blessed_vae/resolve/main/blessed2.vae.pt", "name": "Blessed2.vae.safetensors"}], |
|
"4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}], |
|
"5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}] |
|
} |
|
|
|
controlnet_list = { |
|
"1.canny": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"} |
|
], |
|
"2.openpose": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"} |
|
], |
|
"3.depth": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"}, |
|
{"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"} |
|
], |
|
"4.normal_map": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"} |
|
], |
|
"5.mlsd": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"} |
|
], |
|
"6.lineart": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"} |
|
], |
|
"7.soft_edge": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"} |
|
], |
|
"8.scribble": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"} |
|
], |
|
"9.segmentation": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"} |
|
], |
|
"10.shuffle": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"} |
|
], |
|
"11.tile": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"} |
|
], |
|
"12.inpaint": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"} |
|
], |
|
"13.instruct_p2p": [ |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"}, |
|
{"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"} |
|
] |
|
} |
|
|
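# Routing table for the "prefix:url" download syntax: each prefix maps to the
# directory where the downloaded file should be stored.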
|
url = "" |
|
prefixes = { |
|
"model": models_dir, |
|
"vae": vaes_dir, |
|
"lora": loras_dir, |
|
"embed": embeddings_dir, |
|
"extension": extensions_dir, |
|
"control": control_dir, |
|
"adetailer": adetailer_dir |
|
} |
|
|
|
extension_repo = [] |
|
directories = list(prefixes.values())
|
get_ipython().system('mkdir -p {" ".join(directories)}') |
|
|
|
hf_token = huggingface_token if huggingface_token else "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" |
|
user_header = f"\"Authorization: Bearer {hf_token}\"" |
|
|
|
''' Formatted Info Output ''' |
|
|
|
from math import floor |
|
|
|
def center_text(text, terminal_width=45): |
|
text_length = len(text) |
|
left_padding = floor((terminal_width - text_length) / 2) |
|
right_padding = terminal_width - text_length - left_padding |
|
return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m" |
|
|
|
def format_output(url, dst_dir, file_name): |
|
info = f"[{file_name.split('.')[0]}]" |
|
info = center_text(info) |
|
|
|
print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}") |
|
print(f"\033[33mURL: \033[34m{url}") |
|
print(f"\033[33mSAVE DIR: \033[34m{dst_dir}") |
|
print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m") |
|
|
|
''' Get Image Preview | CivitAi ''' |
|
|
|
def get_data_from_api(model_id): |
|
"""Fetch model data from the API""" |
|
endpoint_url = f"https://civitai.com/api/v1/model-versions/{model_id}" |
|
headers = {"Content-Type": "application/json"} |
|
try: |
|
response = requests.get(endpoint_url, headers=headers) |
|
response.raise_for_status() |
|
return response.json() |
|
except requests.exceptions.RequestException as e: |
|
print(f"An error occurred: {e}") |
|
return None |
|
|
|
def extract_model_info(data, url): |
|
"""Extract model information based on URL""" |
|
if 'type=' in url: |
|
model_type = parse_qs(urlparse(url).query).get('type', [''])[0] |
|
model_name = data['files'][1]['name'] |
|
else: |
|
model_type = data['model']['type'] |
|
model_name = data['files'][0]['name'] |
|
|
|
|
|
if env == 'Kaggle': |
|
image_url = next((image['url'] for image in data['images'] if image['nsfwLevel'] < 4), None) |
|
else: |
|
image_url = data['images'][0]['url'] |
|
|
|
return model_type, model_name, image_url |
|
|
|
def gen_preview_filename(model_name, image_url): |
|
"""Generate a preview filename""" |
|
name = model_name.split('.') |
|
img_exts = image_url.split('.') |
|
return f"{name[0]}.preview.{img_exts[-1]}" |
|
|
|
''' main download code ''' |
|
|
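# handle_manual() parses entries of the form "prefix:url[optional_file_name]" and either
# queues extensions for cloning or downloads the file into the directory mapped to the prefix.
# Illustrative example (placeholder URL): "model:https://example.com/file.safetensors[MyModel.safetensors]"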
|
def handle_manual(url): |
|
url_parts = url.split(':', 1) |
|
prefix = url_parts[0] |
|
path = url_parts[1] |
|
|
|
file_name_match = re.search(r'\[(.*?)\]', path) |
|
file_name = file_name_match.group(1) if file_name_match else None |
|
if file_name: |
|
path = re.sub(r'\[.*?\]', '', path) |
|
|
|
if prefix in prefixes: |
|
dir = prefixes[prefix] |
|
if prefix != "extension": |
|
try: |
|
manual_download(path, dir, file_name=file_name) |
|
except Exception as e: |
|
print(f"Error downloading file: {e}") |
|
else: |
|
extension_repo.append((path, file_name)) |
|
|
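# manual_download() picks a download strategy per host: CivitAI links get an API token and
# a preview image, Google Drive links go through gdown, Hugging Face and plain HTTP links
# are fetched with aria2c.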
|
def manual_download(url, dst_dir, file_name): |
|
aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c' |
|
basename = url.split("/")[-1] if file_name is None else file_name |
|
header_option = f"--header={user_header}" |
|
|
|
|
|
support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA') |
|
civitai_token = "62c0c5956b2f9defbd844d754000180b" |
|
|
|
if 'civitai' in url: |
|
url = f"{url}{'&' if '?' in url else '?'}token={civitai_token}" |
|
model_id = url.split('/')[-1].split('?')[0] |
|
clean_url = re.sub(r'[?&]token=[^&]*', '', url) |
|
|
|
data = get_data_from_api(model_id) |
|
if data: |
|
model_type, model_name, image_url = extract_model_info(data, url) |
|
|
|
if any(t in model_type for t in support_types): |
|
if model_name and image_url: |
|
image_file_name = gen_preview_filename(model_name if not file_name else file_name, image_url) |
|
with capture.capture_output() as cap: |
|
get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o {image_file_name} '{image_url}'") |
|
del cap |
|
file_name = file_name or model_name |
|
else: |
|
clean_url = url |
|
|
|
""" Formatted info output """ |
|
    model_name_or_basename = file_name if 'huggingface' not in url else basename
|
format_output(clean_url or url, dst_dir, model_name_or_basename) |
|
|
|
print("\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None |
|
if 'civitai' in url and data and any(t in model_type for t in support_types) and (locals().get('image_file_name') or ''): |
|
print(f"\033[32m[Preview DL]:\033[0m {image_file_name} - {image_url}\n") |
|
|
|
|
|
|
|
if 'drive.google' in url: |
|
try: |
|
have_drive_link |
|
        except NameError:
|
get_ipython().system('pip install -U gdown > /dev/null') |
|
have_drive_link = True |
|
|
|
if 'folders' in url: |
|
get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c') |
|
else: |
|
if file_name: |
|
get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c') |
|
else: |
|
get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c') |
|
|
|
|
|
elif 'huggingface' in url: |
|
if '/blob/' in url: |
|
url = url.replace('/blob/', '/resolve/') |
|
get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} '{url}'") |
|
|
|
|
|
elif 'http' in url: |
|
get_ipython().system("aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} '{url}'") |
|
|
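# download() takes a comma-separated string of entries: "prefix:url" entries are handed to
# handle_manual(), while "url dst_dir file_name" triples are passed straight to manual_download().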
|
def download(url): |
|
links_and_paths = url.split(',') |
|
|
|
for link_or_path in links_and_paths: |
|
link_or_path = link_or_path.strip() |
|
if not link_or_path: |
|
continue |
|
if any(link_or_path.startswith(prefix.lower()) for prefix in prefixes): |
|
handle_manual(link_or_path) |
|
continue |
|
|
|
        parts = link_or_path.split()
        url, dst_dir = parts[0], parts[1]
        file_name = parts[2] if len(parts) > 2 else None
        manual_download(url, dst_dir, file_name)
|
|
|
    unpack_zip_files()
|
|
|
|
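# Extract any downloaded .zip archives found in the target directories, then delete the archives.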
|
def unpack_zip_files():
|
for directory in directories: |
|
for root, dirs, files in os.walk(directory): |
|
for file in files: |
|
if file.endswith(".zip"): |
|
zip_path = os.path.join(root, file) |
|
extract_path = os.path.splitext(zip_path)[0] |
|
with zipfile.ZipFile(zip_path, 'r') as zip_ref: |
|
zip_ref.extractall(extract_path) |
|
os.remove(zip_path) |
|
|
|
''' submodels - added urls ''' |
|
|
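# add_submodels() resolves a widget selection ("none", "ALL" or a catalog entry name) plus an
# optional list of extra numbers into a de-duplicated list of download entries.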
|
def add_submodels(selection, num_selection, model_dict, dst_dir): |
|
if selection == "none": |
|
return [] |
|
if selection == "ALL": |
|
all_models = [] |
|
for models in model_dict.values(): |
|
all_models.extend(models) |
|
selected_models = all_models |
|
else: |
|
        selected_models = list(model_dict[selection])
        selected_nums = map(int, num_selection.replace(',', ' ').split())
        for num in selected_nums:
            if 1 <= num <= len(model_dict):
                name = list(model_dict)[num - 1]
                selected_models.extend(model_dict[name])
|
|
|
unique_models = list({model['name']: model for model in selected_models}.values()) |
|
for model in unique_models: |
|
model['dst_dir'] = dst_dir |
|
|
|
return unique_models |
|
|
|
def handle_submodels(selection, num_selection, model_dict, dst_dir, url): |
|
submodels = add_submodels(selection, num_selection, model_dict, dst_dir) |
|
for submodel in submodels: |
|
if not inpainting_model and "inpainting" in submodel['name']: |
|
continue |
|
url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, " |
|
return url |
|
|
|
url = handle_submodels(model, model_num, model_list, models_dir, url) |
|
url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url) |
|
url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url) |
|
|
|
''' file.txt - added urls ''' |
|
|
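# Parse a custom .txt file (local path or URL) whose lines are grouped by tags such as
# "# model", "# vae" or "# lora", each followed by comma-separated download URLs.
# Hypothetical example of such a file:
#   # model
#   https://example.com/model_a.safetensors, https://example.com/model_b.safetensors[custom_name.safetensors]
#   # vae
#   https://example.com/some.vae.safetensors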
|
def process_file_download(file_url, prefixes, unique_urls): |
|
files_urls = "" |
|
|
|
if file_url.startswith("http"): |
|
if "blob" in file_url: |
|
file_url = file_url.replace("blob", "raw") |
|
response = requests.get(file_url) |
|
lines = response.text.split('\n') |
|
else: |
|
with open(file_url, 'r') as file: |
|
lines = file.readlines() |
|
|
|
current_tag = None |
|
for line in lines: |
|
line = line.strip() |
|
if any(f'# {tag}' in line.lower() for tag in prefixes): |
|
current_tag = next((tag for tag in prefixes if tag in line.lower())) |
|
|
|
urls = [url.split('#')[0].strip() for url in line.split(',')] |
|
for url in urls: |
|
filter_url = url.split('[')[0] |
|
|
|
if url.startswith("http") and filter_url not in unique_urls: |
|
files_urls += f"{current_tag}:{url}, " |
|
unique_urls.add(filter_url) |
|
|
|
return files_urls |
|
|
|
file_urls = "" |
|
unique_urls = set() |
|
|
|
if custom_file_urls: |
|
    for custom_file_url in custom_file_urls.replace(',', ' ').split():
|
if not custom_file_url.endswith('.txt'): |
|
custom_file_url += '.txt' |
|
if not custom_file_url.startswith('http'): |
|
if not custom_file_url.startswith(root_path): |
|
custom_file_url = f'{root_path}/{custom_file_url}' |
|
|
|
try: |
|
file_urls += process_file_download(custom_file_url, prefixes, unique_urls) |
|
except FileNotFoundError: |
|
pass |
|
|
|
|
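# Combine the widget URL fields and the parsed .txt files into one comma-separated
# download string, prefixing each URL with its asset type.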
|
urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url) |
|
prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', ' ').split())
|
url += ", ".join(prefixed_urls) + ", " + file_urls |
|
|
|
if detailed_download == "on": |
|
print("\n\n\033[33m# ====== Подробная Загрузка ====== #\n\033[0m") |
|
download(url) |
|
print("\n\033[33m# =============================== #\n\033[0m") |
|
else: |
|
with capture.capture_output() as cap: |
|
download(url) |
|
del cap |
|
|
|
print("\r🏁 Скачивание Завершено!" + " "*15) |
|
|
|
|
|
|
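# Clean up .ipynb_checkpoints directories and leftover aria2 control files inside the WebUI tree.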
|
get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1') |
|
|
|
|
|
|
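# Clone any extensions queued via the "extension:" prefix into the extensions directory.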
|
if len(extension_repo) > 0: |
|
print("✨ Установка кастомных расширений...", end='', flush=True) |
|
with capture.capture_output() as cap: |
|
for repo, repo_name in extension_repo: |
|
if not repo_name: |
|
repo_name = repo.split('/')[-1] |
|
get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch') |
|
del cap |
|
print(f"\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!") |
|
|
|
|
|
|
|
if detailed_download == "off": |
|
print("\n\n\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.") |
|
|
|
get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result') |
|
|
|
|