{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "id": "2lJmbqrs3Mu8" }, "outputs": [], "source": [ "##~ DOWNLOADING CODE | BY: ANXETY ~##\n", "\n", "import os\n", "import re\n", "import time\n", "import json\n", "import shutil\n", "import zipfile\n", "import requests\n", "import subprocess\n", "from datetime import timedelta\n", "from subprocess import getoutput\n", "from IPython.utils import capture\n", "from IPython.display import clear_output\n", "from urllib.parse import urlparse, parse_qs\n", "\n", "\n", "# ================= DETECT ENV =================\n", "def detect_environment():\n", " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n", " environments = {\n", " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n", " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n", " }\n", " for env_var, (environment, path) in environments.items():\n", " if env_var in os.environ:\n", " return environment, path, free_plan\n", "\n", "env, root_path, free_plan = detect_environment()\n", "webui_path = f\"{root_path}/sdw\"\n", "\n", "\n", "# ================ LIBRARIES V2 ================\n", "flag_file = f\"{root_path}/libraries_installed.txt\"\n", "\n", "if not os.path.exists(flag_file):\n", " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n", "\n", " install_lib = {\n", " \"aria2\": \"apt -y install aria2\",\n", " \"localtunnel\": \"npm install -g localtunnel\",\n", " \"insightface\": \"pip install insightface\"\n", " }\n", "\n", " additional_libs = {\n", " \"Google Colab\": {\n", " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n", " },\n", " \"Kaggle\": {\n", " \"xformers\": \"pip install xformers==0.0.26.post1\",\n", " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n", " \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n", " }\n", " }\n", "\n", " if env in additional_libs:\n", " install_lib.update(additional_libs[env])\n", "\n", " # Loop through libraries\n", " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n", " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n", " subprocess.run(install_cmd, shell=True, capture_output=True)\n", "\n", " # Additional specific packages\n", " with capture.capture_output() as cap:\n", " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n", " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n", " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n", " del cap\n", "\n", " clear_output()\n", "\n", " # Save file install lib\n", " with open(flag_file, \"w\") as f:\n", " f.write(\">W<'\")\n", "\n", " print(\"🍪 Libraries are installed!\" + \" \"*35)\n", " time.sleep(2)\n", " clear_output()\n", "\n", "\n", "# ================= loading settings V4 =================\n", "def load_settings(path):\n", " if os.path.exists(path):\n", " with open(path, 'r') as file:\n", " return json.load(file)\n", " return {}\n", "\n", "settings = 
load_settings(f'{root_path}/settings.json')\n", "\n", "VARIABLES = [\n", " 'model', 'model_num', 'inpainting_model',\n", " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n", " 'change_webui', 'detailed_download', 'controlnet',\n", " 'controlnet_num', 'commit_hash', 'huggingface_token',\n", " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n", " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n", " 'Extensions_url', 'custom_file_urls'\n", "]\n", "\n", "locals().update({key: settings.get(key) for key in VARIABLES})\n", "\n", "\n", "# ================= OTHER =================\n", "try:\n", " start_colab\n", "except NameError:\n", " start_colab = int(time.time())-5\n", "\n", "# CONFIG DIR\n", "models_dir = f\"{webui_path}/models/Stable-diffusion\"\n", "vaes_dir = f\"{webui_path}/models/VAE\"\n", "embeddings_dir = f\"{webui_path}/embeddings\"\n", "loras_dir = f\"{webui_path}/models/Lora\"\n", "extensions_dir = f\"{webui_path}/extensions\"\n", "control_dir = f\"{webui_path}/models/ControlNet\"\n", "adetailer_dir = f\"{webui_path}/models/adetailer\"\n", "\n", "\n", "# ================= MAIN CODE =================\n", "if not os.path.exists(webui_path):\n", " start_install = int(time.time())\n", " print(\"⌚ Unpacking Stable Diffusion...\" if change_webui != 'Forge' else \"⌚ Unpacking Stable Diffusion (Forge)...\", end='')\n", " with capture.capture_output() as cap:\n", " aria2_command = \"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M\"\n", " url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\" if change_webui != 'Forge' else \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n", " !{aria2_command} {url} -o repo.zip\n", "\n", " !unzip -q -o repo.zip -d {webui_path}\n", " !rm -rf repo.zip\n", "\n", " %cd {root_path}\n", " os.environ[\"SAFETENSORS_FAST_GPU\"]='1'\n", " os.environ[\"CUDA_MODULE_LOADING\"]=\"LAZY\"\n", " os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"3\"\n", " os.environ[\"PYTHONWARNINGS\"] = \"ignore\"\n", "\n", " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n", " del cap\n", " install_time = timedelta(seconds=time.time()-start_install)\n", " print(\"\\r🚀 Unpacking is complete! Took\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds // 3600, (install_time.seconds // 60) % 60, install_time.seconds % 60), end='', flush=True)\n", "else:\n", " print(\"🚀 All unpacked... Skip. 
⚡\")\n", " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n", " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n", " print(f\"⌚️ This session has been running for \\033[33m{time_since_start}\\033[0m\")\n", "\n", "\n", "## Update WebUI and extensions\n", "if latest_webui or latest_exstensions:\n", " action = \"WebUI and Extensions Update\" if latest_webui and latest_exstensions else (\"WebUI Update\" if latest_webui else \"Extensions Update\")\n", " print(f\"⌚️ {action}...\", end='', flush=True)\n", " with capture.capture_output() as cap:\n", " !git config --global user.email \"you@example.com\"\n", " !git config --global user.name \"Your Name\"\n", "\n", " ## Update WebUI\n", " if latest_webui:\n", " %cd {webui_path}\n", " !git restore .\n", " !git pull -X theirs --rebase --autostash\n", "\n", " ## Update extensions\n", " if latest_exstensions:\n", " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n", " del cap\n", " print(f\"\\r✨ {action} Completed!\")\n", "\n", "\n", "# === FIXING EXTENSIONS ===\n", "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n", "\n", "with capture.capture_output() as cap:\n", " # --- Umi-Wildcard ---\n", " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n", "\n", " # --- Encrypt-Image ---\n", " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n", "\n", " # --- Additional-Networks ---\n", " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n", "del cap\n", "\n", "\n", "## Version switching\n", "if commit_hash:\n", " print('⏳ Time machine activation...', end=\"\", flush=True)\n", " with capture.capture_output() as cap:\n", " %cd {webui_path}\n", " !git config --global user.email \"you@example.com\"\n", " !git config --global user.name \"Your Name\"\n", " !git reset --hard {commit_hash}\n", " del cap\n", " print(f\"\\r⌛️ The time machine has been activated! Current commit: \\033[34m{commit_hash}\\033[0m\")\n", "\n", "\n", "## Downloading model and stuff | oh~ Hey! 
If you're freaked out by that code too, don't worry, me too!\n", "print(\"📦 Downloading models and stuff...\", end='')\n", "model_list = {\n", " \"1.Anime (by XpucT) + INP\": [\n", " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_v2.safetensors\"},\n", " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_v2-inpainting.safetensors\"}\n", " ],\n", " \"2.BluMix [Anime] [V7] + INP\": [\n", " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_v7.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_v7-inpainting.safetensors\"}\n", " ],\n", " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n", " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n", " ],\n", " \"4.Counterfeit [Anime] [V3] + INP\": [\n", " {\"url\": \"https://civitai.com/api/download/models/125050\", \"name\": \"Counterfeit_V3.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n", " ],\n", " \"5.CuteColor [Anime] [V3]\": [\n", " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n", " ],\n", " \"6.Dark-Sushi-Mix [Anime]\": [\n", " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n", " ],\n", " \"7.Deliberate [Realism] [V6] + INP\": [\n", " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_v6.safetensors\"},\n", " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_v6-inpainting.safetensors\"}\n", " ],\n", " \"8.Meina-Mix [Anime] [V11] + INP\": [\n", " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n", " ],\n", " \"9.Mix-Pro [Anime] [V4] + INP\": [\n", " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n", " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n", " ]\n", "}\n", "\n", "vae_list = {\n", " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"vae-ft-mse-840000-ema-pruned.vae.safetensors\"}],\n", " \"2.Anything.vae\": [{\"url\": \"https://civitai.com/api/download/models/119279\", \"name\": \"Anything.vae.safetensors\"}],\n", " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/blessed_vae/resolve/main/blessed2.vae.pt\", \"name\": \"Blessed2.vae.safetensors\"}],\n", " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n", " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n", "}\n", "\n", "controlnet_list = {\n", " \"1.canny\": [\n", " {\"url\": 
\"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n", " ],\n", " \"2.openpose\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n", " ],\n", " \"3.depth\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n", " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n", " ],\n", " \"4.normal_map\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n", " ],\n", " \"5.mlsd\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n", " ],\n", " \"6.lineart\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n", " ],\n", " \"7.soft_edge\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n", " ],\n", " \"8.scribble\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n", " ],\n", " \"9.segmentation\": 
[\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n", " ],\n", " \"10.shuffle\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n", " ],\n", " \"11.tile\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n", " ],\n", " \"12.inpaint\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n", " ],\n", " \"13.instruct_p2p\": [\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n", " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n", " ]\n", "}\n", "\n", "url = \"\"\n", "prefixes = {\n", " \"model\": models_dir,\n", " \"vae\": vaes_dir,\n", " \"lora\": loras_dir,\n", " \"embed\": embeddings_dir,\n", " \"extension\": extensions_dir,\n", " \"control\": control_dir,\n", " \"adetailer\": adetailer_dir\n", "}\n", "\n", "extension_repo = []\n", "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n", "!mkdir -p {\" \".join(directories)}\n", "\n", "hf_token = huggingface_token if huggingface_token else \"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\"\n", "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n", "\n", "''' Formatted Info Output '''\n", "\n", "from math import floor\n", "\n", "def center_text(text, terminal_width=45):\n", " text_length = len(text)\n", " left_padding = floor((terminal_width - text_length) / 2)\n", " right_padding = terminal_width - text_length - left_padding\n", " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n", "\n", "def format_output(url, dst_dir, file_name):\n", " info = f\"[{file_name.split('.')[0]}]\"\n", " info = center_text(info)\n", "\n", " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n", " print(f\"\\033[33mURL: \\033[34m{url}\")\n", " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n", " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n", "\n", "''' Get Image Preview | CivitAi '''\n", "\n", "def get_data_from_api(model_id):\n", " \"\"\"Fetch model data from the API\"\"\"\n", " endpoint_url = f\"https://civitai.com/api/v1/model-versions/{model_id}\"\n", " headers = {\"Content-Type\": \"application/json\"}\n", " try:\n", " response = requests.get(endpoint_url, headers=headers)\n", " 
response.raise_for_status()\n", " return response.json()\n", " except requests.exceptions.RequestException as e:\n", " print(f\"An error occurred: {e}\")\n", " return None\n", "\n", "def extract_model_info(data, url):\n", " \"\"\"Extract model information based on URL\"\"\"\n", " if 'type=' in url:\n", " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n", " model_name = data['files'][1]['name']\n", " else:\n", " model_type = data['model']['type']\n", " model_name = data['files'][0]['name']\n", "\n", " # Finding a safe image: less than level 4 | Kaggle\n", " if env == 'Kaggle':\n", " image_url = next((image['url'] for image in data['images'] if image['nsfwLevel'] < 4), None)\n", " else:\n", " image_url = data['images'][0]['url']\n", "\n", " return model_type, model_name, image_url\n", "\n", "def gen_preview_filename(model_name, image_url):\n", " \"\"\"Generate a preview filename\"\"\"\n", " name = model_name.split('.')\n", " img_exts = image_url.split('.')\n", " return f\"{name[0]}.preview.{img_exts[-1]}\"\n", "\n", "''' main download code '''\n", "\n", "def handle_manual(url):\n", " url_parts = url.split(':', 1)\n", " prefix = url_parts[0]\n", " path = url_parts[1]\n", "\n", " file_name_match = re.search(r'\\[(.*?)\\]', path)\n", " file_name = file_name_match.group(1) if file_name_match else None\n", " if file_name:\n", " path = re.sub(r'\\[.*?\\]', '', path)\n", "\n", " if prefix in prefixes:\n", " dir = prefixes[prefix]\n", " if prefix != \"extension\":\n", " try:\n", " manual_download(path, dir, file_name=file_name)\n", " except Exception as e:\n", " print(f\"Error downloading file: {e}\")\n", " else:\n", " extension_repo.append((path, file_name))\n", "\n", "def manual_download(url, dst_dir, file_name):\n", " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n", " basename = url.split(\"/\")[-1] if file_name is None else file_name\n", " header_option = f\"--header={user_header}\"\n", "\n", " # ==== CivitAi API+ ====\n", " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA') # for dl preview image\n", " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n", "\n", " if 'civitai' in url:\n", " url = f\"{url}{'&' if '?' 
in url else '?'}token={civitai_token}\"\n", " model_id = url.split('/')[-1].split('?')[0]\n", " clean_url = re.sub(r'[?&]token=[^&]*', '', url) # hide token\n", "\n", " data = get_data_from_api(model_id)\n", " if data:\n", " model_type, model_name, image_url = extract_model_info(data, url)\n", "\n", " if any(t in model_type for t in support_types):\n", " if model_name and image_url:\n", " image_file_name = gen_preview_filename(model_name if not file_name else file_name, image_url)\n", " with capture.capture_output() as cap:\n", " !aria2c {aria2_args} -d {dst_dir} -o {image_file_name} '{image_url}'\n", " del cap\n", " file_name = file_name or model_name\n", " else:\n", " clean_url = url\n", "\n", " \"\"\" Formatted info output \"\"\"\n", " model_name_or_basename = file_name if not 'huggingface' in url else basename\n", " format_output(clean_url or url, dst_dir, model_name_or_basename)\n", "\n", " print(\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n", " if 'civitai' in url and data and any(t in model_type for t in support_types) and (locals().get('image_file_name') or ''):\n", " print(f\"\\033[32m[Preview DL]:\\033[0m {image_file_name} - {image_url}\\n\")\n", " # =====================\n", "\n", " # -- GDrive --\n", " if 'drive.google' in url:\n", " try:\n", " have_drive_link\n", " except:\n", " !pip install -U gdown > /dev/null\n", " have_drive_link = True\n", "\n", " if 'folders' in url:\n", " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n", " else:\n", " if file_name:\n", " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n", " else:\n", " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n", "\n", " # -- Hugging Face --\n", " elif 'huggingface' in url:\n", " if '/blob/' in url:\n", " url = url.replace('/blob/', '/resolve/')\n", " !aria2c {header_option} {aria2_args} -d {dst_dir} -o {basename} '{url}'\n", "\n", " # -- Other --\n", " elif 'http' in url:\n", " !aria2c {aria2_args} -d {dst_dir} {'-o' + file_name if file_name else ''} '{url}'\n", "\n", "def download(url):\n", " links_and_paths = url.split(',')\n", "\n", " for link_or_path in links_and_paths:\n", " link_or_path = link_or_path.strip()\n", " if not link_or_path:\n", " continue\n", " if any(link_or_path.startswith(prefix.lower()) for prefix in prefixes):\n", " handle_manual(link_or_path)\n", " continue\n", "\n", " url, dst_dir, file_name = link_or_path.split()\n", " manual_download(url, dst_dir, file_name)\n", "\n", " unpucking_zip_files()\n", "\n", "# unpucking zip files\n", "def unpucking_zip_files():\n", " for directory in directories:\n", " for root, dirs, files in os.walk(directory):\n", " for file in files:\n", " if file.endswith(\".zip\"):\n", " zip_path = os.path.join(root, file)\n", " extract_path = os.path.splitext(zip_path)[0]\n", " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n", " zip_ref.extractall(extract_path)\n", " os.remove(zip_path)\n", "\n", "''' submodels - added urls '''\n", "\n", "def add_submodels(selection, num_selection, model_dict, dst_dir):\n", " if selection == \"none\":\n", " return []\n", " if selection == \"ALL\":\n", " all_models = []\n", " for models in model_dict.values():\n", " all_models.extend(models)\n", " selected_models = all_models\n", " else:\n", " selected_models = model_dict[selection]\n", " selected_nums = map(int, num_selection.replace(',', '').split())\n", " for num in selected_nums:\n", " if 1 <= num <= len(model_dict):\n", " name = list(model_dict)[num - 1]\n", " 
selected_models.extend(model_dict[name])\n", "\n", " unique_models = list({model['name']: model for model in selected_models}.values())\n", " for model in unique_models:\n", " model['dst_dir'] = dst_dir\n", "\n", " return unique_models\n", "\n", "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n", " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n", " for submodel in submodels:\n", " if not inpainting_model and \"inpainting\" in submodel['name']:\n", " continue\n", " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n", " return url\n", "\n", "url = handle_submodels(model, model_num, model_list, models_dir, url)\n", "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n", "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n", "\n", "''' file.txt - added urls '''\n", "\n", "def process_file_download(file_url, prefixes, unique_urls):\n", " files_urls = \"\"\n", "\n", " if file_url.startswith(\"http\"):\n", " if \"blob\" in file_url:\n", " file_url = file_url.replace(\"blob\", \"raw\")\n", " response = requests.get(file_url)\n", " lines = response.text.split('\\n')\n", " else:\n", " with open(file_url, 'r') as file:\n", " lines = file.readlines()\n", "\n", " current_tag = None\n", " for line in lines:\n", " line = line.strip()\n", " if any(f'# {tag}' in line.lower() for tag in prefixes):\n", " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n", "\n", " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n", " for url in urls:\n", " filter_url = url.split('[')[0] # same url filter\n", "\n", " if url.startswith(\"http\") and filter_url not in unique_urls:\n", " files_urls += f\"{current_tag}:{url}, \"\n", " unique_urls.add(filter_url)\n", "\n", " return files_urls\n", "\n", "file_urls = \"\"\n", "unique_urls = set()\n", "\n", "if custom_file_urls:\n", " for custom_file_url in custom_file_urls.replace(',', '').split():\n", " if not custom_file_url.endswith('.txt'):\n", " custom_file_url += '.txt'\n", " if not custom_file_url.startswith('http'):\n", " if not custom_file_url.startswith(root_path):\n", " custom_file_url = f'{root_path}/{custom_file_url}'\n", "\n", " try:\n", " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n", " except FileNotFoundError:\n", " pass\n", "\n", "# url prefixing\n", "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n", "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n", "url += \", \".join(prefixed_urls) + \", \" + file_urls\n", "\n", "if detailed_download == \"on\":\n", " print(\"\\n\\n\\033[33m# ====== Detailed Download ====== #\\n\\033[0m\")\n", " download(url)\n", " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n", "else:\n", " with capture.capture_output() as cap:\n", " download(url)\n", " del cap\n", "\n", "print(\"\\r🏁 Download Complete!\" + \" \"*15)\n", "\n", "\n", "# Cleaning shit after downloading...\n", "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n", "\n", "\n", "## Install of Custom extensions\n", "if len(extension_repo) > 0:\n", " print(\"✨ Installing custom extensions...\", end='', flush=True)\n", " with capture.capture_output() as cap:\n", " for repo, repo_name in extension_repo:\n", " if not repo_name:\n", " repo_name = 
repo.split('/')[-1]\n", " !cd {extensions_dir} \\\n", " && git clone {repo} {repo_name} \\\n", " && cd {repo_name} \\\n", " && git fetch\n", " del cap\n", " print(f\"\\r📦 Installed {len(extension_repo)} custom extensions!\")\n", "\n", "\n", "## List Models and stuff V2\n", "if detailed_download == \"off\":\n", " print(\"\\n\\n\\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.\")\n", "\n", "%run {root_path}/file_cell/special/dl_display_results.py # display download results widget" ] } ], "metadata": { "colab": { "provenance": [] }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }