NagisaNao committed
Commit 70d8ee9 · 1 Parent(s): 995f5f2

🗑️ I don't remember what's changed.

files_cells/notebooks/en/downloading_en.ipynb CHANGED
@@ -1,696 +1,698 @@
1
- {
2
- "cells": [
3
- {
4
- "cell_type": "code",
5
- "execution_count": null,
6
- "metadata": {
7
- "id": "2lJmbqrs3Mu8"
8
- },
9
- "outputs": [],
10
- "source": [
11
- "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
- "\n",
13
- "import os\n",
14
- "import re\n",
15
- "import time\n",
16
- "import json\n",
17
- "import shutil\n",
18
- "import zipfile\n",
19
- "import requests\n",
20
- "import subprocess\n",
21
- "from datetime import timedelta\n",
22
- "from subprocess import getoutput\n",
23
- "from IPython.utils import capture\n",
24
- "from IPython.display import clear_output\n",
25
- "from urllib.parse import urlparse, parse_qs\n",
26
- "\n",
27
- "\n",
28
- "# ================= DETECT ENV =================\n",
29
- "def detect_environment():\n",
30
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
- " environments = {\n",
32
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
- " }\n",
35
- " for env_var, (environment, path) in environments.items():\n",
36
- " if env_var in os.environ:\n",
37
- " return environment, path, free_plan\n",
38
- "\n",
39
- "env, root_path, free_plan = detect_environment()\n",
40
- "webui_path = f\"{root_path}/sdw\"\n",
41
- "\n",
42
- "\n",
43
- "# ================ LIBRARIES V2 ================\n",
44
- "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
45
- "\n",
46
- "if not os.path.exists(flag_file):\n",
47
- " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
48
- "\n",
49
- " install_lib = {\n",
50
- " \"aria2\": \"apt -y install aria2\",\n",
51
- " \"localtunnel\": \"npm install -g localtunnel\",\n",
52
- " \"insightface\": \"pip install insightface\"\n",
53
- " }\n",
54
- "\n",
55
- " additional_libs = {\n",
56
- " \"Google Colab\": {\n",
57
- " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
58
- " },\n",
59
- " \"Kaggle\": {\n",
60
- " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
61
- " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
62
- " \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
63
- " }\n",
64
- " }\n",
65
- "\n",
66
- " if env in additional_libs:\n",
67
- " install_lib.update(additional_libs[env])\n",
68
- "\n",
69
- " # Loop through libraries\n",
70
- " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
71
- " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
72
- " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
73
- "\n",
74
- " # Additional specific packages\n",
75
- " with capture.capture_output() as cap:\n",
76
- " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
77
- " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
78
- " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
79
- " del cap\n",
80
- "\n",
81
- " clear_output()\n",
82
- "\n",
83
- " # Save file install lib\n",
84
- " with open(flag_file, \"w\") as f:\n",
85
- " f.write(\">W<'\")\n",
86
- "\n",
87
- " print(\"🍪 Libraries are installed!\" + \" \"*35)\n",
88
- " time.sleep(2)\n",
89
- " clear_output()\n",
90
- "\n",
91
- "\n",
92
- "# ================= loading settings V4 =================\n",
93
- "def load_settings(path):\n",
94
- " if os.path.exists(path):\n",
95
- " with open(path, 'r') as file:\n",
96
- " return json.load(file)\n",
97
- " return {}\n",
98
- "\n",
99
- "settings = load_settings(f'{root_path}/settings.json')\n",
100
- "\n",
101
- "VARIABLES = [\n",
102
- " 'model', 'model_num', 'inpainting_model',\n",
103
- " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n",
104
- " 'change_webui', 'detailed_download', 'controlnet',\n",
105
- " 'controlnet_num', 'commit_hash', 'huggingface_token',\n",
106
- " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n",
107
- " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n",
108
- " 'Extensions_url', 'custom_file_urls'\n",
109
- "]\n",
110
- "\n",
111
- "locals().update({key: settings.get(key) for key in VARIABLES})\n",
112
- "\n",
113
- "\n",
114
- "# ================= OTHER =================\n",
115
- "try:\n",
116
- " start_colab\n",
117
- "except:\n",
118
- " start_colab = int(time.time())-5\n",
119
- "\n",
120
- "# CONFIG DIR\n",
121
- "models_dir = f\"{webui_path}/models/Stable-diffusion\"\n",
122
- "vaes_dir = f\"{webui_path}/models/VAE\"\n",
123
- "embeddings_dir = f\"{webui_path}/embeddings\"\n",
124
- "loras_dir = f\"{webui_path}/models/Lora\"\n",
125
- "extensions_dir = f\"{webui_path}/extensions\"\n",
126
- "control_dir = f\"{webui_path}/models/ControlNet\"\n",
127
- "adetailer_dir = f\"{webui_path}/models/adetailer\"\n",
128
- "\n",
129
- "\n",
130
- "# ================= MAIN CODE =================\n",
131
- "if not os.path.exists(webui_path):\n",
132
- " start_install = int(time.time())\n",
133
- " print(\"⌚ Unpacking Stable Diffusion...\" if change_webui != 'Forge' else \"⌚ Unpacking Stable Diffusion (Forge)...\", end='')\n",
134
- " with capture.capture_output() as cap:\n",
135
- " aria2_command = \"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M\"\n",
136
- " url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\" if change_webui != 'Forge' else \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n",
137
- " !{aria2_command} {url} -o repo.zip\n",
138
- "\n",
139
- " !unzip -q -o repo.zip -d {webui_path}\n",
140
- " !rm -rf repo.zip\n",
141
- "\n",
142
- " %cd {root_path}\n",
143
- " os.environ[\"SAFETENSORS_FAST_GPU\"]='1'\n",
144
- " os.environ[\"CUDA_MODULE_LOADING\"]=\"LAZY\"\n",
145
- " os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"3\"\n",
146
- " os.environ[\"PYTHONWARNINGS\"] = \"ignore\"\n",
147
- "\n",
148
- " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
149
- " del cap\n",
150
- " install_time = timedelta(seconds=time.time()-start_install)\n",
151
- " print(\"\\r🚀 Unpacking is complete! For\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
152
- "else:\n",
153
- " print(\"🚀 All unpacked... Skip. ⚡\")\n",
154
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
155
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
156
- " print(f\"⌚️ You have been conducting this session for - \\033[33m{time_since_start}\\033[0m\")\n",
157
- "\n",
158
- "\n",
159
- "## Changes extensions and WebUi\n",
160
- "if latest_webui or latest_exstensions:\n",
161
- " action = \"Updating WebUI and Extensions\" if latest_webui and latest_exstensions else (\"WebUI Update\" if latest_webui else \"Update Extensions\")\n",
162
- " print(f\"⌚️ {action}...\", end='', flush=True)\n",
163
- " with capture.capture_output() as cap:\n",
164
- " !git config --global user.email \"[email protected]\"\n",
165
- " !git config --global user.name \"Your Name\"\n",
166
- "\n",
167
- " ## Update Webui\n",
168
- " if latest_webui:\n",
169
- " %cd {webui_path}\n",
170
- " !git restore .\n",
171
- " !git pull -X theirs --rebase --autostash\n",
172
- "\n",
173
- " ## Update extensions\n",
174
- " if latest_exstensions:\n",
175
- " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
176
- " del cap\n",
177
- " print(f\"\\r✨ {action} Completed!\")\n",
178
- "\n",
179
- "\n",
180
- "# === FIXING EXTENSIONS ===\n",
181
- "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
182
- "\n",
183
- "with capture.capture_output() as cap:\n",
184
- " # --- Umi-Wildcard ---\n",
185
- " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
186
- "\n",
187
- " # --- Encrypt-Image ---\n",
188
- " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
189
- "\n",
190
- " # --- Additional-Networks ---\n",
191
- " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
192
- "del cap\n",
193
- "\n",
194
- "\n",
195
- "## Version switching\n",
196
- "if commit_hash:\n",
197
- " print('⏳ Time machine activation...', end=\"\", flush=True)\n",
198
- " with capture.capture_output() as cap:\n",
199
- " %cd {webui_path}\n",
200
- " !git config --global user.email \"[email protected]\"\n",
201
- " !git config --global user.name \"Your Name\"\n",
202
- " !git reset --hard {commit_hash}\n",
203
- " del cap\n",
204
- " print(f\"\\r⌛️ The time machine has been activated! Current commit: \\033[34m{commit_hash}\\033[0m\")\n",
205
- "\n",
206
- "\n",
207
- "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
208
- "print(\"📦 Downloading models and stuff...\", end='')\n",
209
- "model_list = {\n",
210
- " \"1.Anime (by XpucT) + INP\": [\n",
211
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
212
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
213
- " ],\n",
214
- " \"2.BluMix [Anime] [V7] + INP\": [\n",
215
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
216
- " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
217
- " ],\n",
218
- " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
219
- " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
220
- " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
221
- " ],\n",
222
- " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
223
- " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
224
- " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
225
- " ],\n",
226
- " \"5.CuteColor [Anime] [V3]\": [\n",
227
- " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
228
- " ],\n",
229
- " \"6.Dark-Sushi-Mix [Anime]\": [\n",
230
- " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
231
- " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
232
- " ],\n",
233
- " \"7.Deliberate [Realism] [V6] + INP\": [\n",
234
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
235
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
236
- " ],\n",
237
- " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
238
- " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
239
- " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
240
- " ],\n",
241
- " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
242
- " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
243
- " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
244
- " ]\n",
245
- "}\n",
246
- "\n",
247
- "vae_list = {\n",
248
- " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"vae-ft-mse-840000-ema-pruned.vae.safetensors\"}],\n",
249
- " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
250
- " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
251
- " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
252
- " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
253
- "}\n",
254
- "\n",
255
- "controlnet_list = {\n",
256
- " \"1.canny\": [\n",
257
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
258
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
259
- " ],\n",
260
- " \"2.openpose\": [\n",
261
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
262
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
263
- " ],\n",
264
- " \"3.depth\": [\n",
265
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
266
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
267
- " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
268
- " ],\n",
269
- " \"4.normal_map\": [\n",
270
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
271
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
272
- " ],\n",
273
- " \"5.mlsd\": [\n",
274
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
275
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
276
- " ],\n",
277
- " \"6.lineart\": [\n",
278
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
279
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
280
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
281
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
282
- " ],\n",
283
- " \"7.soft_edge\": [\n",
284
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
285
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
286
- " ],\n",
287
- " \"8.scribble\": [\n",
288
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
289
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
290
- " ],\n",
291
- " \"9.segmentation\": [\n",
292
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
293
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
294
- " ],\n",
295
- " \"10.shuffle\": [\n",
296
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
297
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
298
- " ],\n",
299
- " \"11.tile\": [\n",
300
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
301
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
302
- " ],\n",
303
- " \"12.inpaint\": [\n",
304
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
305
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
306
- " ],\n",
307
- " \"13.instruct_p2p\": [\n",
308
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
309
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
310
- " ]\n",
311
- "}\n",
312
- "\n",
313
- "url = \"\"\n",
314
- "prefixes = {\n",
315
- " \"model\": models_dir,\n",
316
- " \"vae\": vaes_dir,\n",
317
- " \"lora\": loras_dir,\n",
318
- " \"embed\": embeddings_dir,\n",
319
- " \"extension\": extensions_dir,\n",
320
- " \"control\": control_dir,\n",
321
- " \"adetailer\": adetailer_dir,\n",
322
- " \"config\": webui_path\n",
323
- "}\n",
324
- "\n",
325
- "extension_repo = []\n",
326
- "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
327
- "!mkdir -p {\" \".join(directories)}\n",
328
- "\n",
329
- "hf_token = huggingface_token if huggingface_token else \"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\"\n",
330
- "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
331
- "\n",
332
- "''' Formatted Info Output '''\n",
333
- "\n",
334
- "from math import floor\n",
335
- "\n",
336
- "def center_text(text, terminal_width=45):\n",
337
- " text_length = len(text)\n",
338
- " left_padding = floor((terminal_width - text_length) / 2)\n",
339
- " right_padding = terminal_width - text_length - left_padding\n",
340
- " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
341
- "\n",
342
- "def format_output(url, dst_dir, file_name):\n",
343
- " info = f\"[{file_name.split('.')[0]}]\"\n",
344
- " info = center_text(info)\n",
345
- "\n",
346
- " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
347
- " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
348
- " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
349
- " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
350
- "\n",
351
- "''' GET CivitAi API - DATA '''\n",
352
- "\n",
353
- "def strip_(url, file_name=None):\n",
354
- " if 'github.com' in url:\n",
355
- " if '/blob/' in url:\n",
356
- " url = url.replace('/blob/', '/raw/')\n",
357
- "\n",
358
- " elif \"civitai.com\" in url:\n",
359
- " return CivitAi_API(url, file_name)\n",
360
- "\n",
361
- " elif \"huggingface.co\" in url:\n",
362
- " if '/blob/' in url:\n",
363
- " url = url.replace('/blob/', '/resolve/')\n",
364
- " if '?' in url:\n",
365
- " url = url.split('?')[0]\n",
366
- "\n",
367
- " return url\n",
368
- "\n",
369
- "def CivitAi_API(url, file_name=None):\n",
370
- " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
371
- " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
372
- "\n",
373
- " if '?token=' in url:\n",
374
- " url = url.split('?token=')[0]\n",
375
- " if '?type=' in url:\n",
376
- " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
377
- " else:\n",
378
- " url = f\"{url}?token={civitai_token}\"\n",
379
- "\n",
380
- " # Determine model or version id\n",
381
- " if \"civitai.com/models/\" in url:\n",
382
- " if '?modelVersionId=' in url:\n",
383
- " version_id = url.split('?modelVersionId=')[1]\n",
384
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
385
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
- " else:\n",
387
- " model_id = url.split('/models/')[1].split('/')[0]\n",
388
- " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
389
- " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
390
- " else:\n",
391
- " version_id = url.split('/models/')[1].split('/')[0]\n",
392
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
393
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
- "\n",
395
- " data = response.json()\n",
396
- "\n",
397
- " if response.status_code != 200:\n",
398
- " return None, None, None, None, None, None, None\n",
399
- "\n",
400
- " # Define model type and name\n",
401
- " if \"civitai.com/models/\" in url:\n",
402
- " if '?modelVersionId=' in url:\n",
403
- " model_type = data['model']['type']\n",
404
- " model_name = data['files'][0]['name']\n",
405
- " else:\n",
406
- " model_type = data['type']\n",
407
- " model_name = data['modelVersions'][0]['files'][0]['name']\n",
408
- " elif 'type=' in url:\n",
409
- " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
410
- " if 'model' in model_type.lower():\n",
411
- " model_name = data['files'][0]['name']\n",
412
- " else:\n",
413
- " model_name = data['files'][1]['name']\n",
414
- " else:\n",
415
- " model_type = data['model']['type']\n",
416
- " model_name = data['files'][0]['name']\n",
417
- "\n",
418
- " model_name = file_name or model_name\n",
419
- "\n",
420
- " # Determine DownloadUrl\n",
421
- " if \"civitai.com/models/\" in url:\n",
422
- " if '?modelVersionId=' in url:\n",
423
- " download_url = data.get('downloadUrl')\n",
424
- " else:\n",
425
- " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
426
- " elif 'type=' in url:\n",
427
- " if any(t.lower() in model_type.lower() for t in support_types):\n",
428
- " download_url = data['files'][0]['downloadUrl']\n",
429
- " else:\n",
430
- " download_url = data['files'][1]['downloadUrl']\n",
431
- " else:\n",
432
- " download_url = data.get('downloadUrl')\n",
433
- "\n",
434
- " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
435
- "\n",
436
- " # Find a safe image: level less than 4 | Kaggle\n",
437
- " image_url, image_name = None, None\n",
438
- " if any(t in model_type for t in support_types):\n",
439
- " try:\n",
440
- " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
441
- " if env == 'Kaggle':\n",
442
- " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
443
- " else:\n",
444
- " image_url = images[0]['url'] if images else None\n",
445
- " except KeyError:\n",
446
- " pass\n",
447
- "\n",
448
- " # Generate a name to save the image\n",
449
- " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
450
- "\n",
451
- " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
452
- "\n",
453
- "''' Main Download Code '''\n",
454
- "\n",
455
- "def download(url):\n",
456
- " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
457
- "\n",
458
- " for link_or_path in links_and_paths:\n",
459
- " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
460
- " handle_manual(link_or_path)\n",
461
- " else:\n",
462
- " url, dst_dir, file_name = link_or_path.split()\n",
463
- " manual_download(url, dst_dir, file_name)\n",
464
- "\n",
465
- " unpack_zip_files()\n",
466
- "\n",
467
- "def unpack_zip_files():\n",
468
- " for directory in directories:\n",
469
- " for root, _, files in os.walk(directory):\n",
470
- " for file in files:\n",
471
- " if file.endswith(\".zip\"):\n",
472
- " zip_path = os.path.join(root, file)\n",
473
- " extract_path = os.path.splitext(zip_path)[0]\n",
474
- " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
475
- " zip_ref.extractall(extract_path)\n",
476
- " os.remove(zip_path)\n",
477
- "\n",
478
- "def handle_manual(url):\n",
479
- " url_parts = url.split(':', 1)\n",
480
- " prefix, path = url_parts[0], url_parts[1]\n",
481
- "\n",
482
- " file_name_match = re.search(r'\\[(.*?)\\]', path)\n",
483
- " file_name = file_name_match.group(1) if file_name_match else None\n",
484
- " if file_name:\n",
485
- " path = re.sub(r'\\[.*?\\]', '', path)\n",
486
- "\n",
487
- " if prefix in prefixes:\n",
488
- " dir = prefixes[prefix]\n",
489
- " if prefix != \"extension\":\n",
490
- " try:\n",
491
- " manual_download(path, dir, file_name=file_name)\n",
492
- " except Exception as e:\n",
493
- " print(f\"Error downloading file: {e}\")\n",
494
- " else:\n",
495
- " extension_repo.append((path, file_name))\n",
496
- "\n",
497
- "def manual_download(url, dst_dir, file_name):\n",
498
- " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
499
- " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
500
- " header_option = f\"--header={user_header}\"\n",
501
- "\n",
502
- " if 'github.com' in url:\n",
503
- " url = strip_(url)\n",
504
- "\n",
505
- " # -- CivitAi APi+ V2 --\n",
506
- " elif 'civitai' in url:\n",
507
- " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
508
- "\n",
509
- " if image_url and image_name:\n",
510
- " with capture.capture_output() as cap:\n",
511
- " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
512
- " del cap\n",
513
- "\n",
514
- " elif \"huggingface.co\" in url:\n",
515
- " clean_url = strip_(url)\n",
516
- "\n",
517
- " \"\"\" Formatted info output \"\"\"\n",
518
- " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
519
- " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
520
- "\n",
521
- " # ## -- for my tests --\n",
522
- " # print(url, dst_dir, model_name_or_basename)\n",
523
- " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
524
- " if 'civitai' in url and data and image_name:\n",
525
- " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
526
- " # =====================\n",
527
- "\n",
528
- " # # -- Git Hub --\n",
529
- " if 'github.com' in url or 'githubusercontent.com' in url:\n",
530
- " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
531
- "\n",
532
- " # -- GDrive --\n",
533
- " elif 'drive.google' in url:\n",
534
- " try:\n",
535
- " have_drive_link\n",
536
- " except:\n",
537
- " !pip install -U gdown > /dev/null\n",
538
- " have_drive_link = True\n",
539
- "\n",
540
- " if 'folders' in url:\n",
541
- " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
542
- " else:\n",
543
- " if file_name:\n",
544
- " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
545
- " else:\n",
546
- " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
547
- "\n",
548
- " # -- Hugging Face --\n",
549
- " elif 'huggingface' in url:\n",
550
- " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
551
- "\n",
552
- " # -- Other --\n",
553
- " elif 'http' in url:\n",
554
- " !aria2c {aria2_args} -d {dst_dir} '{'-o' + file_name if file_name else ''}' '{url}'\n",
555
- "\n",
556
- "''' SubModels - Added URLs '''\n",
557
- "\n",
558
- "def add_submodels(selection, num_selection, model_dict, dst_dir):\n",
559
- " if selection == \"none\":\n",
560
- " return []\n",
561
- " if selection == \"ALL\":\n",
562
- " all_models = []\n",
563
- " for models in model_dict.values():\n",
564
- " all_models.extend(models)\n",
565
- " selected_models = all_models\n",
566
- " else:\n",
567
- " selected_models = model_dict[selection]\n",
568
- " selected_nums = map(int, num_selection.replace(',', '').split())\n",
569
- " for num in selected_nums:\n",
570
- " if 1 <= num <= len(model_dict):\n",
571
- " name = list(model_dict)[num - 1]\n",
572
- " selected_models.extend(model_dict[name])\n",
573
- "\n",
574
- " unique_models = list({model['name']: model for model in selected_models}.values())\n",
575
- " for model in unique_models:\n",
576
- " model['dst_dir'] = dst_dir\n",
577
- "\n",
578
- " return unique_models\n",
579
- "\n",
580
- "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n",
581
- " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n",
582
- " for submodel in submodels:\n",
583
- " if not inpainting_model and \"inpainting\" in submodel['name']:\n",
584
- " continue\n",
585
- " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n",
586
- " return url\n",
587
- "\n",
588
- "url = handle_submodels(model, model_num, model_list, models_dir, url)\n",
589
- "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n",
590
- "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n",
591
- "\n",
592
- "''' file.txt - added urls '''\n",
593
- "\n",
594
- "def process_file_download(file_url, prefixes, unique_urls):\n",
595
- " files_urls = \"\"\n",
596
- "\n",
597
- " if file_url.startswith(\"http\"):\n",
598
- " if \"blob\" in file_url:\n",
599
- " file_url = file_url.replace(\"blob\", \"raw\")\n",
600
- " response = requests.get(file_url)\n",
601
- " lines = response.text.split('\\n')\n",
602
- " else:\n",
603
- " with open(file_url, 'r') as file:\n",
604
- " lines = file.readlines()\n",
605
- "\n",
606
- " current_tag = None\n",
607
- " for line in lines:\n",
608
- " line = line.strip()\n",
609
- " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
610
- " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
611
- "\n",
612
- " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
613
- " for url in urls:\n",
614
- " filter_url = url.split('[')[0] # same url filter\n",
615
- "\n",
616
- " if url.startswith(\"http\") and filter_url not in unique_urls:\n",
617
- " files_urls += f\"{current_tag}:{url}, \"\n",
618
- " unique_urls.add(filter_url)\n",
619
- "\n",
620
- " return files_urls\n",
621
- "\n",
622
- "file_urls = \"\"\n",
623
- "unique_urls = set()\n",
624
- "\n",
625
- "if custom_file_urls:\n",
626
- " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
627
- " if not custom_file_url.endswith('.txt'):\n",
628
- " custom_file_url += '.txt'\n",
629
- " if not custom_file_url.startswith('http'):\n",
630
- " if not custom_file_url.startswith(root_path):\n",
631
- " custom_file_url = f'{root_path}/{custom_file_url}'\n",
632
- "\n",
633
- " try:\n",
634
- " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
635
- " except FileNotFoundError:\n",
636
- " pass\n",
637
- "\n",
638
- "# url prefixing\n",
639
- "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
640
- "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
641
- "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
642
- "\n",
643
- "if detailed_download == \"on\":\n",
644
- " print(\"\\n\\n\\033[33m# ====== Detailed Download ====== #\\n\\033[0m\")\n",
645
- " download(url)\n",
646
- " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n",
647
- "else:\n",
648
- " with capture.capture_output() as cap:\n",
649
- " download(url)\n",
650
- " del cap\n",
651
- "\n",
652
- "print(\"\\r🏁 Download Complete!\" + \" \"*15)\n",
653
- "\n",
654
- "\n",
655
- "# Cleaning shit after downloading...\n",
656
- "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n",
657
- "\n",
658
- "\n",
659
- "## Install of Custom extensions\n",
660
- "if len(extension_repo) > 0:\n",
661
- " print(\"✨ Installing custom extensions...\", end='', flush=True)\n",
662
- " with capture.capture_output() as cap:\n",
663
- " for repo, repo_name in extension_repo:\n",
664
- " if not repo_name:\n",
665
- " repo_name = repo.split('/')[-1]\n",
666
- " !cd {extensions_dir} \\\n",
667
- " && git clone {repo} {repo_name} \\\n",
668
- " && cd {repo_name} \\\n",
669
- " && git fetch\n",
670
- " del cap\n",
671
- " print(f\"\\r📦 Installed '{len(extension_repo)}', Custom extensions!\")\n",
672
- "\n",
673
- "\n",
674
- "## List Models and stuff V2\n",
675
- "if detailed_download == \"off\":\n",
676
- " print(\"\\n\\n\\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.\")\n",
677
- "\n",
678
- "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result"
679
- ]
680
- }
681
- ],
682
- "metadata": {
683
- "colab": {
684
- "provenance": []
685
- },
686
- "kernelspec": {
687
- "display_name": "Python 3",
688
- "name": "python3"
689
- },
690
- "language_info": {
691
- "name": "python"
692
- }
693
- },
694
- "nbformat": 4,
695
- "nbformat_minor": 0
 
 
696
  }
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "metadata": {
7
+ "id": "2lJmbqrs3Mu8"
8
+ },
9
+ "outputs": [],
10
+ "source": [
11
+ "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
+ "\n",
13
+ "import os\n",
14
+ "import re\n",
15
+ "import time\n",
16
+ "import json\n",
17
+ "import shutil\n",
18
+ "import zipfile\n",
19
+ "import requests\n",
20
+ "import subprocess\n",
21
+ "from datetime import timedelta\n",
22
+ "from subprocess import getoutput\n",
23
+ "from IPython.utils import capture\n",
24
+ "from IPython.display import clear_output\n",
25
+ "from urllib.parse import urlparse, parse_qs\n",
26
+ "\n",
27
+ "\n",
28
+ "# ================= DETECT ENV =================\n",
29
+ "def detect_environment():\n",
30
+ " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
+ " environments = {\n",
32
+ " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
+ " }\n",
35
+ " for env_var, (environment, path) in environments.items():\n",
36
+ " if env_var in os.environ:\n",
37
+ " return environment, path, free_plan\n",
38
+ "\n",
39
+ "env, root_path, free_plan = detect_environment()\n",
40
+ "webui_path = f\"{root_path}/sdw\"\n",
41
+ "\n",
42
+ "\n",
43
+ "# ================ LIBRARIES V2 ================\n",
44
+ "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
45
+ "\n",
46
+ "if not os.path.exists(flag_file):\n",
47
+ " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
48
+ "\n",
49
+ " install_lib = {\n",
50
+ " # \"aria2\": \"apt -y install aria2\",\n",
51
+ " \"aria2\": \"pip install aria2\",\n",
52
+ " \"localtunnel\": \"npm install -g localtunnel\",\n",
53
+ " \"insightface\": \"pip install insightface\"\n",
54
+ " }\n",
55
+ "\n",
56
+ " additional_libs = {\n",
57
+ " \"Google Colab\": {\n",
58
+ " \"xformers\": \"pip install xformers==0.0.27 --no-deps\"\n",
59
+ " },\n",
60
+ " \"Kaggle\": {\n",
61
+ " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
62
+ " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
63
+ " # \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
64
+ " }\n",
65
+ " }\n",
66
+ "\n",
67
+ " if env in additional_libs:\n",
68
+ " install_lib.update(additional_libs[env])\n",
69
+ "\n",
70
+ " # Loop through libraries\n",
71
+ " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
72
+ " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
73
+ " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
74
+ "\n",
75
+ " # Additional specific packages\n",
76
+ " with capture.capture_output() as cap:\n",
77
+ " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
78
+ " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
79
+ " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n",
80
+ " del cap\n",
81
+ "\n",
82
+ " clear_output()\n",
83
+ "\n",
84
+ " # Save file install lib\n",
85
+ " with open(flag_file, \"w\") as f:\n",
86
+ " f.write(\">W<'\")\n",
87
+ "\n",
88
+ " print(\"🍪 Libraries are installed!\" + \" \"*35)\n",
89
+ " time.sleep(2)\n",
90
+ " clear_output()\n",
91
+ "\n",
92
+ "\n",
93
+ "# ================= loading settings V4 =================\n",
94
+ "def load_settings(path):\n",
95
+ " if os.path.exists(path):\n",
96
+ " with open(path, 'r') as file:\n",
97
+ " return json.load(file)\n",
98
+ " return {}\n",
99
+ "\n",
100
+ "settings = load_settings(f'{root_path}/settings.json')\n",
101
+ "\n",
102
+ "VARIABLES = [\n",
103
+ " 'model', 'model_num', 'inpainting_model',\n",
104
+ " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n",
105
+ " 'change_webui', 'detailed_download', 'controlnet',\n",
106
+ " 'controlnet_num', 'commit_hash', 'huggingface_token',\n",
107
+ " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n",
108
+ " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n",
109
+ " 'Extensions_url', 'custom_file_urls'\n",
110
+ "]\n",
111
+ "\n",
112
+ "locals().update({key: settings.get(key) for key in VARIABLES})\n",
113
+ "\n",
114
+ "\n",
115
+ "# ================= OTHER =================\n",
116
+ "try:\n",
117
+ " start_colab\n",
118
+ "except:\n",
119
+ " start_colab = int(time.time())-5\n",
120
+ "\n",
121
+ "# CONFIG DIR\n",
122
+ "models_dir = f\"{webui_path}/models/Stable-diffusion\"\n",
123
+ "vaes_dir = f\"{webui_path}/models/VAE\"\n",
124
+ "embeddings_dir = f\"{webui_path}/embeddings\"\n",
125
+ "loras_dir = f\"{webui_path}/models/Lora\"\n",
126
+ "extensions_dir = f\"{webui_path}/extensions\"\n",
127
+ "control_dir = f\"{webui_path}/models/ControlNet\"\n",
128
+ "adetailer_dir = f\"{webui_path}/models/adetailer\"\n",
129
+ "\n",
130
+ "\n",
131
+ "# ================= MAIN CODE =================\n",
132
+ "if not os.path.exists(webui_path):\n",
133
+ " start_install = int(time.time())\n",
134
+ " print(\"⌚ Unpacking Stable Diffusion...\" if change_webui != 'Forge' else \"⌚ Unpacking Stable Diffusion (Forge)...\", end='')\n",
135
+ " with capture.capture_output() as cap:\n",
136
+ " aria2_command = \"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M\"\n",
137
+ " url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\" if change_webui != 'Forge' else \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n",
138
+ " !{aria2_command} {url} -o repo.zip\n",
139
+ "\n",
140
+ " !unzip -q -o repo.zip -d {webui_path}\n",
141
+ " !rm -rf repo.zip\n",
142
+ "\n",
143
+ " %cd {root_path}\n",
144
+ " os.environ[\"SAFETENSORS_FAST_GPU\"]='1'\n",
145
+ " os.environ[\"CUDA_MODULE_LOADING\"]=\"LAZY\"\n",
146
+ " os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"3\"\n",
147
+ " os.environ[\"PYTHONWARNINGS\"] = \"ignore\"\n",
148
+ "\n",
149
+ " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
150
+ " del cap\n",
151
+ " install_time = timedelta(seconds=time.time()-start_install)\n",
152
+ " print(\"\\r🚀 Unpacking is complete! For\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
153
+ "else:\n",
154
+ " print(\"🚀 All unpacked... Skip. ⚡\")\n",
155
+ " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
156
+ " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
157
+ " print(f\"⌚️ You have been conducting this session for - \\033[33m{time_since_start}\\033[0m\")\n",
158
+ "\n",
159
+ "\n",
160
+ "## Changes extensions and WebUi\n",
161
+ "if latest_webui or latest_exstensions:\n",
162
+ " action = \"Updating WebUI and Extensions\" if latest_webui and latest_exstensions else (\"WebUI Update\" if latest_webui else \"Update Extensions\")\n",
163
+ " print(f\"⌚️ {action}...\", end='', flush=True)\n",
164
+ " with capture.capture_output() as cap:\n",
165
+ " !git config --global user.email \"[email protected]\"\n",
166
+ " !git config --global user.name \"Your Name\"\n",
167
+ "\n",
168
+ " ## Update Webui\n",
169
+ " if latest_webui:\n",
170
+ " %cd {webui_path}\n",
171
+ " !git restore .\n",
172
+ " !git pull -X theirs --rebase --autostash\n",
173
+ "\n",
174
+ " ## Update extensions\n",
175
+ " if latest_exstensions:\n",
176
+ " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
177
+ " del cap\n",
178
+ " print(f\"\\r✨ {action} Completed!\")\n",
179
+ "\n",
180
+ "\n",
181
+ "# === FIXING EXTENSIONS ===\n",
182
+ "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
183
+ "\n",
184
+ "with capture.capture_output() as cap:\n",
185
+ " # --- Umi-Wildcard ---\n",
186
+ " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
187
+ "\n",
188
+ " # --- Encrypt-Image ---\n",
189
+ " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
190
+ "\n",
191
+ " # --- Additional-Networks ---\n",
192
+ " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
193
+ "del cap\n",
194
+ "\n",
195
+ "\n",
196
+ "## Version switching\n",
197
+ "if commit_hash:\n",
198
+ " print('⏳ Time machine activation...', end=\"\", flush=True)\n",
199
+ " with capture.capture_output() as cap:\n",
200
+ " %cd {webui_path}\n",
201
+ " !git config --global user.email \"[email protected]\"\n",
202
+ " !git config --global user.name \"Your Name\"\n",
203
+ " !git reset --hard {commit_hash}\n",
204
+ " del cap\n",
205
+ " print(f\"\\r⌛️ The time machine has been activated! Current commit: \\033[34m{commit_hash}\\033[0m\")\n",
206
+ "\n",
207
+ "\n",
208
+ "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
209
+ "print(\"📦 Downloading models and stuff...\", end='')\n",
210
+ "model_list = {\n",
211
+ " \"1.Anime (by XpucT) + INP\": [\n",
212
+ " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
213
+ " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
214
+ " ],\n",
215
+ " \"2.BluMix [Anime] [V7] + INP\": [\n",
216
+ " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
217
+ " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
218
+ " ],\n",
219
+ " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
220
+ " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
221
+ " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
222
+ " ],\n",
223
+ " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
224
+ " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
225
+ " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
226
+ " ],\n",
227
+ " \"5.CuteColor [Anime] [V3]\": [\n",
228
+ " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
229
+ " ],\n",
230
+ " \"6.Dark-Sushi-Mix [Anime]\": [\n",
231
+ " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
232
+ " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
233
+ " ],\n",
234
+ " \"7.Deliberate [Realism] [V6] + INP\": [\n",
235
+ " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
236
+ " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
237
+ " ],\n",
238
+ " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
239
+ " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
240
+ " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
241
+ " ],\n",
242
+ " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
243
+ " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
244
+ " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
245
+ " ]\n",
246
+ "}\n",
247
+ "\n",
248
+ "vae_list = {\n",
249
+ " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"Anime.vae.safetensors\"}],\n",
250
+ " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
251
+ " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
252
+ " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
253
+ " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
254
+ "}\n",
255
+ "\n",
256
+ "controlnet_list = {\n",
257
+ " \"1.canny\": [\n",
258
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
259
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
260
+ " ],\n",
261
+ " \"2.openpose\": [\n",
262
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
263
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
264
+ " ],\n",
265
+ " \"3.depth\": [\n",
266
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
267
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
268
+ " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
269
+ " ],\n",
270
+ " \"4.normal_map\": [\n",
271
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
272
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
273
+ " ],\n",
274
+ " \"5.mlsd\": [\n",
275
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
276
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
277
+ " ],\n",
278
+ " \"6.lineart\": [\n",
279
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
280
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
281
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
282
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
283
+ " ],\n",
284
+ " \"7.soft_edge\": [\n",
285
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
286
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
287
+ " ],\n",
288
+ " \"8.scribble\": [\n",
289
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
290
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
291
+ " ],\n",
292
+ " \"9.segmentation\": [\n",
293
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
294
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
295
+ " ],\n",
296
+ " \"10.shuffle\": [\n",
297
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
298
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
299
+ " ],\n",
300
+ " \"11.tile\": [\n",
301
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
302
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
303
+ " ],\n",
304
+ " \"12.inpaint\": [\n",
305
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
306
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
307
+ " ],\n",
308
+ " \"13.instruct_p2p\": [\n",
309
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
310
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
311
+ " ]\n",
312
+ "}\n",
313
+ "\n",
314
+ "url = \"\"\n",
315
+ "prefixes = {\n",
316
+ " \"model\": models_dir,\n",
317
+ " \"vae\": vaes_dir,\n",
318
+ " \"lora\": loras_dir,\n",
319
+ " \"embed\": embeddings_dir,\n",
320
+ " \"extension\": extensions_dir,\n",
321
+ " \"control\": control_dir,\n",
322
+ " \"adetailer\": adetailer_dir,\n",
323
+ " \"config\": webui_path\n",
324
+ "}\n",
325
+ "\n",
326
+ "extension_repo = []\n",
327
+ "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
328
+ "!mkdir -p {\" \".join(directories)}\n",
329
+ "\n",
330
+ "hf_token = huggingface_token if huggingface_token else \"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\"\n",
331
+ "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
332
+ "\n",
333
+ "''' Formatted Info Output '''\n",
334
+ "\n",
335
+ "from math import floor\n",
336
+ "\n",
337
+ "def center_text(text, terminal_width=45):\n",
338
+ " text_length = len(text)\n",
339
+ " left_padding = floor((terminal_width - text_length) / 2)\n",
340
+ " right_padding = terminal_width - text_length - left_padding\n",
341
+ " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
342
+ "\n",
343
+ "def format_output(url, dst_dir, file_name):\n",
344
+ " info = f\"[{file_name.split('.')[0]}]\"\n",
345
+ " info = center_text(info)\n",
346
+ "\n",
347
+ " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
348
+ " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
349
+ " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
350
+ " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
351
+ "\n",
352
+ "''' GET CivitAi API - DATA '''\n",
353
+ "\n",
354
+ "def strip_(url, file_name=None):\n",
355
+ " if 'github.com' in url:\n",
356
+ " if '/blob/' in url:\n",
357
+ " url = url.replace('/blob/', '/raw/')\n",
358
+ "\n",
359
+ " elif \"civitai.com\" in url:\n",
360
+ " return CivitAi_API(url, file_name)\n",
361
+ "\n",
362
+ " elif \"huggingface.co\" in url:\n",
363
+ " if '/blob/' in url:\n",
364
+ " url = url.replace('/blob/', '/resolve/')\n",
365
+ " if '?' in url:\n",
366
+ " url = url.split('?')[0]\n",
367
+ "\n",
368
+ " return url\n",
369
+ "\n",
370
+ "def CivitAi_API(url, file_name=None):\n",
371
+ " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
372
+ " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
373
+ "\n",
374
+ " if '?token=' in url:\n",
375
+ " url = url.split('?token=')[0]\n",
376
+ " if '?type=' in url:\n",
377
+ " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
378
+ " else:\n",
379
+ " url = f\"{url}?token={civitai_token}\"\n",
380
+ "\n",
381
+ " # Determine model or version id\n",
382
+ " if \"civitai.com/models/\" in url:\n",
383
+ " if '?modelVersionId=' in url:\n",
384
+ " version_id = url.split('?modelVersionId=')[1]\n",
385
+ " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
+ " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
387
+ " else:\n",
388
+ " model_id = url.split('/models/')[1].split('/')[0]\n",
389
+ " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
390
+ " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
391
+ " else:\n",
392
+ " version_id = url.split('/models/')[1].split('/')[0]\n",
393
+ " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
+ " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
395
+ "\n",
396
+ " data = response.json()\n",
397
+ "\n",
398
+ " if response.status_code != 200:\n",
399
+ " return None, None, None, None, None, None, None\n",
400
+ "\n",
401
+ " # Define model type and name\n",
402
+ " if \"civitai.com/models/\" in url:\n",
403
+ " if '?modelVersionId=' in url:\n",
404
+ " model_type = data['model']['type']\n",
405
+ " model_name = data['files'][0]['name']\n",
406
+ " else:\n",
407
+ " model_type = data['type']\n",
408
+ " model_name = data['modelVersions'][0]['files'][0]['name']\n",
409
+ " elif 'type=' in url:\n",
410
+ " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
411
+ " if 'model' in model_type.lower():\n",
412
+ " model_name = data['files'][0]['name']\n",
413
+ " else:\n",
414
+ " model_name = data['files'][1]['name']\n",
415
+ " else:\n",
416
+ " model_type = data['model']['type']\n",
417
+ " model_name = data['files'][0]['name']\n",
418
+ "\n",
419
+ " model_name = file_name or model_name\n",
420
+ "\n",
421
+ " # Determine DownloadUrl\n",
422
+ " if \"civitai.com/models/\" in url:\n",
423
+ " if '?modelVersionId=' in url:\n",
424
+ " download_url = data.get('downloadUrl')\n",
425
+ " else:\n",
426
+ " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
427
+ " elif 'type=' in url:\n",
428
+ " if any(t.lower() in model_type.lower() for t in support_types):\n",
429
+ " download_url = data['files'][0]['downloadUrl']\n",
430
+ " else:\n",
431
+ " download_url = data['files'][1]['downloadUrl']\n",
432
+ " else:\n",
433
+ " download_url = data.get('downloadUrl')\n",
434
+ "\n",
435
+ " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
436
+ "\n",
437
+ " # Find a safe image: level less than 4 | Kaggle\n",
438
+ " image_url, image_name = None, None\n",
439
+ " if any(t in model_type for t in support_types):\n",
440
+ " try:\n",
441
+ " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
442
+ " if env == 'Kaggle':\n",
443
+ " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
444
+ " else:\n",
445
+ " image_url = images[0]['url'] if images else None\n",
446
+ " except KeyError:\n",
447
+ " pass\n",
448
+ "\n",
449
+ " # Generate a name to save the image\n",
450
+ " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
451
+ "\n",
452
+ " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
453
+ "\n",
454
+ "''' Main Download Code '''\n",
455
+ "\n",
456
+ "def download(url):\n",
457
+ " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
458
+ "\n",
459
+ " for link_or_path in links_and_paths:\n",
460
+ " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
461
+ " handle_manual(link_or_path)\n",
462
+ " else:\n",
463
+ " url, dst_dir, file_name = link_or_path.split()\n",
464
+ " manual_download(url, dst_dir, file_name)\n",
465
+ "\n",
466
+ " unpack_zip_files()\n",
467
+ "\n",
468
+ "def unpack_zip_files():\n",
469
+ " for directory in directories:\n",
470
+ " for root, _, files in os.walk(directory):\n",
471
+ " for file in files:\n",
472
+ " if file.endswith(\".zip\"):\n",
473
+ " zip_path = os.path.join(root, file)\n",
474
+ " extract_path = os.path.splitext(zip_path)[0]\n",
475
+ " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
476
+ " zip_ref.extractall(extract_path)\n",
477
+ " os.remove(zip_path)\n",
478
+ "\n",
479
+ "def handle_manual(url):\n",
480
+ " url_parts = url.split(':', 1)\n",
481
+ " prefix, path = url_parts[0], url_parts[1]\n",
482
+ "\n",
483
+ " file_name_match = re.search(r'\\[(.*?)\\]', path)\n",
484
+ " file_name = file_name_match.group(1) if file_name_match else None\n",
485
+ " if file_name:\n",
486
+ " path = re.sub(r'\\[.*?\\]', '', path)\n",
487
+ "\n",
488
+ " if prefix in prefixes:\n",
489
+ " dir = prefixes[prefix]\n",
490
+ " if prefix != \"extension\":\n",
491
+ " try:\n",
492
+ " manual_download(path, dir, file_name=file_name)\n",
493
+ " except Exception as e:\n",
494
+ " print(f\"Error downloading file: {e}\")\n",
495
+ " else:\n",
496
+ " extension_repo.append((path, file_name))\n",
497
+ "\n",
498
+ "def manual_download(url, dst_dir, file_name):\n",
499
+ " header_option = f\"--header={user_header}\"\n",
500
+ " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
501
+ " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
502
+ "\n",
503
+ " if 'github.com' in url:\n",
504
+ " url = strip_(url)\n",
505
+ "\n",
506
+ " # -- CivitAi APi+ V2 --\n",
507
+ " elif 'civitai' in url:\n",
508
+ " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
509
+ "\n",
510
+ " if image_url and image_name:\n",
511
+ " with capture.capture_output() as cap:\n",
512
+ " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
513
+ " del cap\n",
514
+ "\n",
515
+ " elif \"huggingface.co\" in url:\n",
516
+ " clean_url = strip_(url)\n",
517
+ " basename = clean_url.split(\"/\")[-1] if file_name is None else file_name\n",
518
+ "\n",
519
+ " \"\"\" Formatted info output \"\"\"\n",
520
+ " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
521
+ " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
522
+ "\n",
523
+ " # ## -- for my tests --\n",
524
+ " # print(url, dst_dir, model_name_or_basename)\n",
525
+ " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
526
+ " if 'civitai' in url and data and image_name:\n",
527
+ " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
528
+ " # =====================\n",
529
+ "\n",
530
+ " # -- Git Hub --\n",
531
+ " if 'github.com' in url or 'githubusercontent.com' in url:\n",
532
+ " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
533
+ "\n",
534
+ " # -- GDrive --\n",
535
+ " elif 'drive.google' in url:\n",
536
+ " try:\n",
537
+ " have_drive_link\n",
538
+ " except:\n",
539
+ " !pip install -q gdown==5.2.0 > /dev/null\n",
540
+ " have_drive_link = True\n",
541
+ "\n",
542
+ " if 'folders' in url:\n",
543
+ " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
544
+ " else:\n",
545
+ " if file_name:\n",
546
+ " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
547
+ " else:\n",
548
+ " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
549
+ "\n",
550
+ " # -- Hugging Face --\n",
551
+ " elif 'huggingface' in url:\n",
552
+ " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
553
+ "\n",
554
+ " # -- Other --\n",
555
+ " elif 'http' in url:\n",
556
+ " !aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o \"{file_name if file_name else ''}\" '{url}'\n",
557
+ "\n",
558
+ "''' SubModels - Added URLs '''\n",
559
+ "\n",
560
+ "def add_submodels(selection, num_selection, model_dict, dst_dir):\n",
561
+ " if selection == \"none\":\n",
562
+ " return []\n",
563
+ " if selection == \"ALL\":\n",
564
+ " all_models = []\n",
565
+ " for models in model_dict.values():\n",
566
+ " all_models.extend(models)\n",
567
+ " selected_models = all_models\n",
568
+ " else:\n",
569
+ " selected_models = model_dict[selection]\n",
570
+ " selected_nums = map(int, num_selection.replace(',', '').split())\n",
571
+ " for num in selected_nums:\n",
572
+ " if 1 <= num <= len(model_dict):\n",
573
+ " name = list(model_dict)[num - 1]\n",
574
+ " selected_models.extend(model_dict[name])\n",
575
+ "\n",
576
+ " unique_models = list({model['name']: model for model in selected_models}.values())\n",
577
+ " for model in unique_models:\n",
578
+ " model['dst_dir'] = dst_dir\n",
579
+ "\n",
580
+ " return unique_models\n",
581
+ "\n",
582
+ "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n",
583
+ " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n",
584
+ " for submodel in submodels:\n",
585
+ " if not inpainting_model and \"inpainting\" in submodel['name']:\n",
586
+ " continue\n",
587
+ " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n",
588
+ " return url\n",
589
+ "\n",
590
+ "url = handle_submodels(model, model_num, model_list, models_dir, url)\n",
591
+ "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n",
592
+ "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n",
593
+ "\n",
594
+ "''' file.txt - added urls '''\n",
595
+ "\n",
596
+ "def process_file_download(file_url, prefixes, unique_urls):\n",
597
+ " files_urls = \"\"\n",
598
+ "\n",
599
+ " if file_url.startswith(\"http\"):\n",
600
+ " if \"blob\" in file_url:\n",
601
+ " file_url = file_url.replace(\"blob\", \"raw\")\n",
602
+ " response = requests.get(file_url)\n",
603
+ " lines = response.text.split('\\n')\n",
604
+ " else:\n",
605
+ " with open(file_url, 'r') as file:\n",
606
+ " lines = file.readlines()\n",
607
+ "\n",
608
+ " current_tag = None\n",
609
+ " for line in lines:\n",
610
+ " line = line.strip()\n",
611
+ " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
612
+ " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
613
+ "\n",
614
+ " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
615
+ " for url in urls:\n",
616
+ " filter_url = url.split('[')[0] # same url filter\n",
617
+ "\n",
618
+ " if url.startswith(\"http\") and filter_url not in unique_urls:\n",
619
+ " files_urls += f\"{current_tag}:{url}, \"\n",
620
+ " unique_urls.add(filter_url)\n",
621
+ "\n",
622
+ " return files_urls\n",
623
+ "\n",
624
+ "file_urls = \"\"\n",
625
+ "unique_urls = set()\n",
626
+ "\n",
627
+ "if custom_file_urls:\n",
628
+ " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
629
+ " if not custom_file_url.endswith('.txt'):\n",
630
+ " custom_file_url += '.txt'\n",
631
+ " if not custom_file_url.startswith('http'):\n",
632
+ " if not custom_file_url.startswith(root_path):\n",
633
+ " custom_file_url = f'{root_path}/{custom_file_url}'\n",
634
+ "\n",
635
+ " try:\n",
636
+ " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
637
+ " except FileNotFoundError:\n",
638
+ " pass\n",
639
+ "\n",
640
+ "# url prefixing\n",
641
+ "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
642
+ "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
643
+ "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
644
+ "\n",
645
+ "if detailed_download == \"on\":\n",
646
+ " print(\"\\n\\n\\033[33m# ====== Detailed Download ====== #\\n\\033[0m\")\n",
647
+ " download(url)\n",
648
+ " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n",
649
+ "else:\n",
650
+ " with capture.capture_output() as cap:\n",
651
+ " download(url)\n",
652
+ " del cap\n",
653
+ "\n",
654
+ "print(\"\\r🏁 Download Complete!\" + \" \"*15)\n",
655
+ "\n",
656
+ "\n",
657
+ "# Cleaning shit after downloading...\n",
658
+ "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n",
659
+ "\n",
660
+ "\n",
661
+ "## Install of Custom extensions\n",
662
+ "if len(extension_repo) > 0:\n",
663
+ " print(\"✨ Installing custom extensions...\", end='', flush=True)\n",
664
+ " with capture.capture_output() as cap:\n",
665
+ " for repo, repo_name in extension_repo:\n",
666
+ " if not repo_name:\n",
667
+ " repo_name = repo.split('/')[-1]\n",
668
+ " !cd {extensions_dir} \\\n",
669
+ " && git clone {repo} {repo_name} \\\n",
670
+ " && cd {repo_name} \\\n",
671
+ " && git fetch\n",
672
+ " del cap\n",
673
+ " print(f\"\\r📦 Installed '{len(extension_repo)}', Custom extensions!\")\n",
674
+ "\n",
675
+ "\n",
676
+ "## List Models and stuff V2\n",
677
+ "if detailed_download == \"off\":\n",
678
+ " print(\"\\n\\n\\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.\")\n",
679
+ "\n",
680
+ "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result"
681
+ ]
682
+ }
683
+ ],
684
+ "metadata": {
685
+ "colab": {
686
+ "provenance": []
687
+ },
688
+ "kernelspec": {
689
+ "display_name": "Python 3",
690
+ "name": "python3"
691
+ },
692
+ "language_info": {
693
+ "name": "python"
694
+ }
695
+ },
696
+ "nbformat": 4,
697
+ "nbformat_minor": 0
698
  }
files_cells/notebooks/en/launch_en.ipynb CHANGED
@@ -1,145 +1,145 @@
1
- {
2
- "nbformat": 4,
3
- "nbformat_minor": 0,
4
- "metadata": {
5
- "colab": {
6
- "provenance": []
7
- },
8
- "kernelspec": {
9
- "name": "python3",
10
- "display_name": "Python 3"
11
- },
12
- "language_info": {
13
- "name": "python"
14
- }
15
- },
16
- "cells": [
17
- {
18
- "cell_type": "code",
19
- "execution_count": null,
20
- "metadata": {
21
- "id": "JKTCrY9LU7Oq"
22
- },
23
- "outputs": [],
24
- "source": [
25
- "##~ LAUNCH CODE | BY: ANXETY ~##\n",
26
- "\n",
27
- "import os\n",
28
- "import re\n",
29
- "import time\n",
30
- "import json\n",
31
- "import requests\n",
32
- "import cloudpickle as pickle\n",
33
- "from datetime import timedelta\n",
34
- "from IPython.display import clear_output\n",
35
- "\n",
36
- "# ================= DETECT ENV =================\n",
37
- "def detect_environment():\n",
38
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
- " environments = {\n",
40
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
- " }\n",
43
- "\n",
44
- " for env_var, (environment, path) in environments.items():\n",
45
- " if env_var in os.environ:\n",
46
- " return environment, path, free_plan\n",
47
- " return 'Unknown', '/unknown/path', free_plan\n",
48
- "\n",
49
- "env, root_path, free_plan = detect_environment()\n",
50
- "webui_path = f\"{root_path}/sdw\"\n",
51
- "\n",
52
- "def load_settings():\n",
53
- " SETTINGS_FILE = f'{root_path}/settings.json'\n",
54
- " if os.path.exists(SETTINGS_FILE):\n",
55
- " with open(SETTINGS_FILE, 'r') as f:\n",
56
- " return json.load(f)\n",
57
- " return {}\n",
58
- "\n",
59
- "settings = load_settings()\n",
60
- "ngrok_token = settings.get('ngrok_token', \"\")\n",
61
- "zrok_token = settings.get('zrok_token', \"\")\n",
62
- "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
- "change_webui = settings.get('change_webui', \"\")\n",
64
- "\n",
65
- "# ======================== TUNNEL V2 ========================\n",
66
- "print('Please Wait...')\n",
67
- "\n",
68
- "def get_public_ip(version='ipv4'):\n",
69
- " try:\n",
70
- " url = f'https://api64.ipify.org?format=json&{version}=true'\n",
71
- " response = requests.get(url)\n",
72
- " return response.json().get('ip', 'N/A')\n",
73
- " except Exception as e:\n",
74
- " print(f\"Error getting public {version} address:\", e)\n",
75
- "\n",
76
- "# Check if public IP is already saved, if not then get it\n",
77
- "public_ip_file = f\"{root_path}/public_ip.txt\"\n",
78
- "if os.path.exists(public_ip_file):\n",
79
- " with open(public_ip_file, 'r') as file:\n",
80
- " public_ipv4 = file.read().strip()\n",
81
- "else:\n",
82
- " public_ipv4 = get_public_ip(version='ipv4')\n",
83
- " with open(public_ip_file, 'w') as file:\n",
84
- " file.write(public_ipv4)\n",
85
- "\n",
86
- "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
87
- "tunnel_port = 1834\n",
88
- "tunnel = tunnel_class(tunnel_port)\n",
89
- "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
90
- "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
91
- "\n",
92
- "if zrok_token:\n",
93
- " !zrok enable {zrok_token} &> /dev/null\n",
94
- " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
95
- "\n",
96
- "clear_output()\n",
97
- "\n",
98
- "# =============== Automatic Fixing Path V3 ===============\n",
99
- "paths_to_check = {\n",
100
- " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
101
- " \"additional_networks_extra_lora_path\": f\"{webui_path}/models/Lora/\",\n",
102
- " \"ad_extra_models_dir\": f\"{webui_path}/models/adetailer/\",\n",
103
- " \"sd_checkpoint_hash\": \"\",\n",
104
- " \"sd_model_checkpoint\": \"\",\n",
105
- " \"sd_vae\": \"None\"\n",
106
- "}\n",
107
- "\n",
108
- "config_path = f'{webui_path}/ui-config.json'\n",
109
- "\n",
110
- "if os.path.exists(config_path):\n",
111
- " with open(config_path, 'r') as file:\n",
112
- " config_data = json.load(file)\n",
113
- "\n",
114
- " for key, value in paths_to_check.items():\n",
115
- " if key in config_data and config_data[key] != value:\n",
116
- " sed_command = f\"sed -i 's|\\\"{key}\\\": \\\".*\\\"|\\\"{key}\\\": \\\"{value}\\\"|' {config_path}\"\n",
117
- " os.system(sed_command)\n",
118
- "\n",
119
- " if env == 'Kaggle':\n",
120
- " get_ipython().system('sed -i \\'s/\"civitai_interface\\\\/NSFW content\\\\/value\":.*/\"civitai_interface\\\\/NSFW content\\\\/value\": false/g\\' {webui_path}/ui-config.json')\n",
121
- "\n",
122
- "with tunnel:\n",
123
- " %cd {webui_path}\n",
124
- "\n",
125
- " commandline_arguments += f' --port={tunnel_port}'\n",
126
- " if ngrok_token:\n",
127
- " commandline_arguments += f' --ngrok {ngrok_token}'\n",
128
- " if env != \"Google Colab\":\n",
129
- " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
130
- "\n",
131
- " if change_webui == 'Forge':\n",
132
- " commandline_arguments += ' --cuda-stream --pin-shared-memory'\n",
133
- "\n",
134
- " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
135
- "\n",
136
- "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
137
- "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
138
- "print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
139
- "\n",
140
- "if zrok_token:\n",
141
- " !zrok disable &> /dev/null"
142
- ]
143
- }
144
- ]
145
  }
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": []
7
+ },
8
+ "kernelspec": {
9
+ "name": "python3",
10
+ "display_name": "Python 3"
11
+ },
12
+ "language_info": {
13
+ "name": "python"
14
+ }
15
+ },
16
+ "cells": [
17
+ {
18
+ "cell_type": "code",
19
+ "execution_count": null,
20
+ "metadata": {
21
+ "id": "JKTCrY9LU7Oq"
22
+ },
23
+ "outputs": [],
24
+ "source": [
25
+ "##~ LAUNCH CODE | BY: ANXETY ~##\n",
26
+ "\n",
27
+ "import os\n",
28
+ "import re\n",
29
+ "import time\n",
30
+ "import json\n",
31
+ "import requests\n",
32
+ "import cloudpickle as pickle\n",
33
+ "from datetime import timedelta\n",
34
+ "from IPython.display import clear_output\n",
35
+ "\n",
36
+ "# ================= DETECT ENV =================\n",
37
+ "def detect_environment():\n",
38
+ " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
+ " environments = {\n",
40
+ " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
+ " }\n",
43
+ "\n",
44
+ " for env_var, (environment, path) in environments.items():\n",
45
+ " if env_var in os.environ:\n",
46
+ " return environment, path, free_plan\n",
47
+ " return 'Unknown', '/unknown/path', free_plan\n",
48
+ "\n",
49
+ "env, root_path, free_plan = detect_environment()\n",
50
+ "webui_path = f\"{root_path}/sdw\"\n",
51
+ "\n",
52
+ "def load_settings():\n",
53
+ " SETTINGS_FILE = f'{root_path}/settings.json'\n",
54
+ " if os.path.exists(SETTINGS_FILE):\n",
55
+ " with open(SETTINGS_FILE, 'r') as f:\n",
56
+ " return json.load(f)\n",
57
+ " return {}\n",
58
+ "\n",
59
+ "settings = load_settings()\n",
60
+ "ngrok_token = settings.get('ngrok_token', \"\")\n",
61
+ "zrok_token = settings.get('zrok_token', \"\")\n",
62
+ "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
+ "change_webui = settings.get('change_webui', \"\")\n",
64
+ "\n",
65
+ "# ======================== TUNNEL V2 ========================\n",
66
+ "print('Please Wait...')\n",
67
+ "\n",
68
+ "def get_public_ip(version='ipv4'):\n",
69
+ " try:\n",
70
+ " url = f'https://api64.ipify.org?format=json&{version}=true'\n",
71
+ " response = requests.get(url)\n",
72
+ " return response.json().get('ip', 'N/A')\n",
73
+ " except Exception as e:\n",
74
+ " print(f\"Error getting public {version} address:\", e)\n",
75
+ "\n",
76
+ "# Check if public IP is already saved, if not then get it\n",
77
+ "public_ip_file = f\"{root_path}/public_ip.txt\"\n",
78
+ "if os.path.exists(public_ip_file):\n",
79
+ " with open(public_ip_file, 'r') as file:\n",
80
+ " public_ipv4 = file.read().strip()\n",
81
+ "else:\n",
82
+ " public_ipv4 = get_public_ip(version='ipv4')\n",
83
+ " with open(public_ip_file, 'w') as file:\n",
84
+ " file.write(public_ipv4)\n",
85
+ "\n",
86
+ "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
87
+ "tunnel_port = 1834\n",
88
+ "tunnel = tunnel_class(tunnel_port)\n",
89
+ "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
90
+ "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
91
+ "\n",
92
+ "if zrok_token:\n",
93
+ " !zrok enable {zrok_token} &> /dev/null\n",
94
+ " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
95
+ "\n",
96
+ "clear_output()\n",
97
+ "\n",
98
+ "# =============== Automatic Fixing Path V3 ===============\n",
99
+ "paths_to_check = {\n",
100
+ " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
101
+ " \"additional_networks_extra_lora_path\": f\"{webui_path}/models/Lora/\",\n",
102
+ " \"ad_extra_models_dir\": f\"{webui_path}/models/adetailer/\",\n",
103
+ " \"sd_checkpoint_hash\": \"\",\n",
104
+ " \"sd_model_checkpoint\": \"\",\n",
105
+ " \"sd_vae\": \"None\"\n",
106
+ "}\n",
107
+ "\n",
108
+ "config_path = f'{webui_path}/config.json'\n",
109
+ "\n",
110
+ "if os.path.exists(config_path):\n",
111
+ " with open(config_path, 'r') as file:\n",
112
+ " config_data = json.load(file)\n",
113
+ "\n",
114
+ " for key, value in paths_to_check.items():\n",
115
+ " if key in config_data and config_data[key] != value:\n",
116
+ " sed_command = f\"sed -i 's|\\\"{key}\\\": \\\".*\\\"|\\\"{key}\\\": \\\"{value}\\\"|' {config_path}\"\n",
117
+ " os.system(sed_command)\n",
118
+ "\n",
119
+ " if env == 'Kaggle':\n",
120
+ " !sed -i 's|\"civitai_interface NSFW content\":.*|\"civitai_interface NSFW content\": false,|' {webui_path}/ui-config.json\n",
121
+ "\n",
122
+ "with tunnel:\n",
123
+ " %cd {webui_path}\n",
124
+ "\n",
125
+ " commandline_arguments += f' --port={tunnel_port}'\n",
126
+ " if ngrok_token:\n",
127
+ " commandline_arguments += f' --ngrok {ngrok_token}'\n",
128
+ " if env != \"Google Colab\":\n",
129
+ " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
130
+ "\n",
131
+ " if change_webui == 'Forge':\n",
132
+ " commandline_arguments += ' --cuda-stream --pin-shared-memory'\n",
133
+ "\n",
134
+ " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
135
+ "\n",
136
+ "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
137
+ "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
138
+ "print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
139
+ "\n",
140
+ "if zrok_token:\n",
141
+ " !zrok disable &> /dev/null"
142
+ ]
143
+ }
144
+ ]
145
  }
files_cells/notebooks/ru/downloading_ru.ipynb CHANGED
@@ -1,696 +1,698 @@
1
- {
2
- "cells": [
3
- {
4
- "cell_type": "code",
5
- "execution_count": null,
6
- "metadata": {
7
- "id": "2lJmbqrs3Mu8"
8
- },
9
- "outputs": [],
10
- "source": [
11
- "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
- "\n",
13
- "import os\n",
14
- "import re\n",
15
- "import time\n",
16
- "import json\n",
17
- "import shutil\n",
18
- "import zipfile\n",
19
- "import requests\n",
20
- "import subprocess\n",
21
- "from datetime import timedelta\n",
22
- "from subprocess import getoutput\n",
23
- "from IPython.utils import capture\n",
24
- "from IPython.display import clear_output\n",
25
- "from urllib.parse import urlparse, parse_qs\n",
26
- "\n",
27
- "\n",
28
- "# ================= DETECT ENV =================\n",
29
- "def detect_environment():\n",
30
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
- " environments = {\n",
32
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
- " }\n",
35
- " for env_var, (environment, path) in environments.items():\n",
36
- " if env_var in os.environ:\n",
37
- " return environment, path, free_plan\n",
38
- "\n",
39
- "env, root_path, free_plan = detect_environment()\n",
40
- "webui_path = f\"{root_path}/sdw\"\n",
41
- "\n",
42
- "\n",
43
- "# ================ LIBRARIES V2 ================\n",
44
- "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
45
- "\n",
46
- "if not os.path.exists(flag_file):\n",
47
- " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n",
48
- "\n",
49
- " install_lib = {\n",
50
- " \"aria2\": \"apt -y install aria2\",\n",
51
- " \"localtunnel\": \"npm install -g localtunnel\",\n",
52
- " \"insightface\": \"pip install insightface\"\n",
53
- " }\n",
54
- "\n",
55
- " additional_libs = {\n",
56
- " \"Google Colab\": {\n",
57
- " \"xformers\": \"pip install xformers==0.0.26.post1 --no-deps\"\n",
58
- " },\n",
59
- " \"Kaggle\": {\n",
60
- " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
61
- " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
62
- " \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
63
- " }\n",
64
- " }\n",
65
- "\n",
66
- " if env in additional_libs:\n",
67
- " install_lib.update(additional_libs[env])\n",
68
- "\n",
69
- " # Loop through libraries\n",
70
- " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
71
- " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
72
- " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
73
- "\n",
74
- " # Additional specific packages\n",
75
- " with capture.capture_output() as cap:\n",
76
- " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
77
- " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
78
- " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
79
- " del cap\n",
80
- "\n",
81
- " clear_output()\n",
82
- "\n",
83
- " # Save file install lib\n",
84
- " with open(flag_file, \"w\") as f:\n",
85
- " f.write(\">W<'\")\n",
86
- "\n",
87
- " print(\"🍪 Библиотеки установлены!\" + \" \"*35)\n",
88
- " time.sleep(2)\n",
89
- " clear_output()\n",
90
- "\n",
91
- "\n",
92
- "# ================= loading settings V4 =================\n",
93
- "def load_settings(path):\n",
94
- " if os.path.exists(path):\n",
95
- " with open(path, 'r') as file:\n",
96
- " return json.load(file)\n",
97
- " return {}\n",
98
- "\n",
99
- "settings = load_settings(f'{root_path}/settings.json')\n",
100
- "\n",
101
- "VARIABLES = [\n",
102
- " 'model', 'model_num', 'inpainting_model',\n",
103
- " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n",
104
- " 'change_webui', 'detailed_download', 'controlnet',\n",
105
- " 'controlnet_num', 'commit_hash', 'huggingface_token',\n",
106
- " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n",
107
- " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n",
108
- " 'Extensions_url', 'custom_file_urls'\n",
109
- "]\n",
110
- "\n",
111
- "locals().update({key: settings.get(key) for key in VARIABLES})\n",
112
- "\n",
113
- "\n",
114
- "# ================= OTHER =================\n",
115
- "try:\n",
116
- " start_colab\n",
117
- "except:\n",
118
- " start_colab = int(time.time())-5\n",
119
- "\n",
120
- "# CONFIG DIR\n",
121
- "models_dir = f\"{webui_path}/models/Stable-diffusion\"\n",
122
- "vaes_dir = f\"{webui_path}/models/VAE\"\n",
123
- "embeddings_dir = f\"{webui_path}/embeddings\"\n",
124
- "loras_dir = f\"{webui_path}/models/Lora\"\n",
125
- "extensions_dir = f\"{webui_path}/extensions\"\n",
126
- "control_dir = f\"{webui_path}/models/ControlNet\"\n",
127
- "adetailer_dir = f\"{webui_path}/models/adetailer\"\n",
128
- "\n",
129
- "\n",
130
- "# ================= MAIN CODE =================\n",
131
- "if not os.path.exists(webui_path):\n",
132
- " start_install = int(time.time())\n",
133
- " print(\"⌚ Распаковка Stable Diffusion...\" if change_webui != 'Forge' else \"⌚ Распаковка Stable Diffusion (Forge)...\", end='')\n",
134
- " with capture.capture_output() as cap:\n",
135
- " aria2_command = \"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M\"\n",
136
- " url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\" if change_webui != 'Forge' else \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n",
137
- " !{aria2_command} {url} -o repo.zip\n",
138
- "\n",
139
- " !unzip -q -o repo.zip -d {webui_path}\n",
140
- " !rm -rf repo.zip\n",
141
- "\n",
142
- " %cd {root_path}\n",
143
- " os.environ[\"SAFETENSORS_FAST_GPU\"]='1'\n",
144
- " os.environ[\"CUDA_MODULE_LOADING\"]=\"LAZY\"\n",
145
- " os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"3\"\n",
146
- " os.environ[\"PYTHONWARNINGS\"] = \"ignore\"\n",
147
- "\n",
148
- " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
149
- " del cap\n",
150
- " install_time = timedelta(seconds=time.time()-start_install)\n",
151
- " print(\"\\r🚀 Распаковка Завершена! За\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
152
- "else:\n",
153
- " print(\"🚀 Все распакованно... Пропуск. ⚡\")\n",
154
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
155
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
156
- " print(f\"⌚️ Вы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\")\n",
157
- "\n",
158
- "\n",
159
- "## Changes extensions and WebUi\n",
160
- "if latest_webui or latest_exstensions:\n",
161
- " action = \"Обновление WebUI и Расширений\" if latest_webui and latest_exstensions else (\"Обновление WebUI\" if latest_webui else \"Обновление Расширений\")\n",
162
- " print(f\"⌚️ {action}...\", end='', flush=True)\n",
163
- " with capture.capture_output() as cap:\n",
164
- " !git config --global user.email \"[email protected]\"\n",
165
- " !git config --global user.name \"Your Name\"\n",
166
- "\n",
167
- " ## Update Webui\n",
168
- " if latest_webui:\n",
169
- " %cd {webui_path}\n",
170
- " !git restore .\n",
171
- " !git pull -X theirs --rebase --autostash\n",
172
- "\n",
173
- " ## Update extensions\n",
174
- " if latest_exstensions:\n",
175
- " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
176
- " del cap\n",
177
- " print(f\"\\r✨ {action} Завершено!\")\n",
178
- "\n",
179
- "\n",
180
- "# === FIXING EXTENSIONS ===\n",
181
- "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
182
- "\n",
183
- "with capture.capture_output() as cap:\n",
184
- " # --- Umi-Wildcard ---\n",
185
- " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
186
- "\n",
187
- " # --- Encrypt-Image ---\n",
188
- " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
189
- "\n",
190
- " # --- Additional-Networks ---\n",
191
- " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
192
- "del cap\n",
193
- "\n",
194
- "\n",
195
- "## Version switching\n",
196
- "if commit_hash:\n",
197
- " print('⏳ Активация машины времени...', end=\"\", flush=True)\n",
198
- " with capture.capture_output() as cap:\n",
199
- " %cd {webui_path}\n",
200
- " !git config --global user.email \"[email protected]\"\n",
201
- " !git config --global user.name \"Your Name\"\n",
202
- " !git reset --hard {commit_hash}\n",
203
- " del cap\n",
204
- " print(f\"\\r⌛️ Машина времени активированна! Текущий коммит: \\033[34m{commit_hash}\\033[0m\")\n",
205
- "\n",
206
- "\n",
207
- "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
208
- "print(\"📦 Скачивание моделей и прочего...\", end='')\n",
209
- "model_list = {\n",
210
- " \"1.Anime (by XpucT) + INP\": [\n",
211
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
212
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
213
- " ],\n",
214
- " \"2.BluMix [Anime] [V7] + INP\": [\n",
215
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
216
- " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
217
- " ],\n",
218
- " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
219
- " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
220
- " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
221
- " ],\n",
222
- " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
223
- " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
224
- " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
225
- " ],\n",
226
- " \"5.CuteColor [Anime] [V3]\": [\n",
227
- " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
228
- " ],\n",
229
- " \"6.Dark-Sushi-Mix [Anime]\": [\n",
230
- " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
231
- " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
232
- " ],\n",
233
- " \"7.Deliberate [Realism] [V6] + INP\": [\n",
234
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
235
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
236
- " ],\n",
237
- " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
238
- " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
239
- " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
240
- " ],\n",
241
- " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
242
- " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
243
- " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
244
- " ]\n",
245
- "}\n",
246
- "\n",
247
- "vae_list = {\n",
248
- " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"vae-ft-mse-840000-ema-pruned.vae.safetensors\"}],\n",
249
- " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
250
- " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
251
- " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
252
- " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
253
- "}\n",
254
- "\n",
255
- "controlnet_list = {\n",
256
- " \"1.canny\": [\n",
257
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
258
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
259
- " ],\n",
260
- " \"2.openpose\": [\n",
261
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
262
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
263
- " ],\n",
264
- " \"3.depth\": [\n",
265
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
266
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
267
- " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
268
- " ],\n",
269
- " \"4.normal_map\": [\n",
270
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
271
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
272
- " ],\n",
273
- " \"5.mlsd\": [\n",
274
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
275
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
276
- " ],\n",
277
- " \"6.lineart\": [\n",
278
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
279
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
280
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
281
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
282
- " ],\n",
283
- " \"7.soft_edge\": [\n",
284
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
285
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
286
- " ],\n",
287
- " \"8.scribble\": [\n",
288
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
289
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
290
- " ],\n",
291
- " \"9.segmentation\": [\n",
292
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
293
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
294
- " ],\n",
295
- " \"10.shuffle\": [\n",
296
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
297
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
298
- " ],\n",
299
- " \"11.tile\": [\n",
300
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
301
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
302
- " ],\n",
303
- " \"12.inpaint\": [\n",
304
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
305
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
306
- " ],\n",
307
- " \"13.instruct_p2p\": [\n",
308
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
309
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
310
- " ]\n",
311
- "}\n",
312
- "\n",
313
- "url = \"\"\n",
314
- "prefixes = {\n",
315
- " \"model\": models_dir,\n",
316
- " \"vae\": vaes_dir,\n",
317
- " \"lora\": loras_dir,\n",
318
- " \"embed\": embeddings_dir,\n",
319
- " \"extension\": extensions_dir,\n",
320
- " \"control\": control_dir,\n",
321
- " \"adetailer\": adetailer_dir,\n",
322
- " \"config\": webui_path\n",
323
- "}\n",
324
- "\n",
325
- "extension_repo = []\n",
326
- "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
327
- "!mkdir -p {\" \".join(directories)}\n",
328
- "\n",
329
- "hf_token = huggingface_token if huggingface_token else \"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\"\n",
330
- "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
331
- "\n",
332
- "''' Formatted Info Output '''\n",
333
- "\n",
334
- "from math import floor\n",
335
- "\n",
336
- "def center_text(text, terminal_width=45):\n",
337
- " text_length = len(text)\n",
338
- " left_padding = floor((terminal_width - text_length) / 2)\n",
339
- " right_padding = terminal_width - text_length - left_padding\n",
340
- " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
341
- "\n",
342
- "def format_output(url, dst_dir, file_name):\n",
343
- " info = f\"[{file_name.split('.')[0]}]\"\n",
344
- " info = center_text(info)\n",
345
- "\n",
346
- " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
347
- " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
348
- " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
349
- " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
350
- "\n",
351
- "''' GET CivitAi API - DATA '''\n",
352
- "\n",
353
- "def strip_(url, file_name=None):\n",
354
- " if 'github.com' in url:\n",
355
- " if '/blob/' in url:\n",
356
- " url = url.replace('/blob/', '/raw/')\n",
357
- "\n",
358
- " elif \"civitai.com\" in url:\n",
359
- " return CivitAi_API(url, file_name)\n",
360
- "\n",
361
- " elif \"huggingface.co\" in url:\n",
362
- " if '/blob/' in url:\n",
363
- " url = url.replace('/blob/', '/resolve/')\n",
364
- " if '?' in url:\n",
365
- " url = url.split('?')[0]\n",
366
- "\n",
367
- " return url\n",
368
- "\n",
369
- "def CivitAi_API(url, file_name=None):\n",
370
- " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
371
- " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
372
- "\n",
373
- " if '?token=' in url:\n",
374
- " url = url.split('?token=')[0]\n",
375
- " if '?type=' in url:\n",
376
- " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
377
- " else:\n",
378
- " url = f\"{url}?token={civitai_token}\"\n",
379
- "\n",
380
- " # Determine model or version id\n",
381
- " if \"civitai.com/models/\" in url:\n",
382
- " if '?modelVersionId=' in url:\n",
383
- " version_id = url.split('?modelVersionId=')[1]\n",
384
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
385
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
- " else:\n",
387
- " model_id = url.split('/models/')[1].split('/')[0]\n",
388
- " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
389
- " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
390
- " else:\n",
391
- " version_id = url.split('/models/')[1].split('/')[0]\n",
392
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
393
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
- "\n",
395
- " data = response.json()\n",
396
- "\n",
397
- " if response.status_code != 200:\n",
398
- " return None, None, None, None, None, None, None\n",
399
- "\n",
400
- " # Define model type and name\n",
401
- " if \"civitai.com/models/\" in url:\n",
402
- " if '?modelVersionId=' in url:\n",
403
- " model_type = data['model']['type']\n",
404
- " model_name = data['files'][0]['name']\n",
405
- " else:\n",
406
- " model_type = data['type']\n",
407
- " model_name = data['modelVersions'][0]['files'][0]['name']\n",
408
- " elif 'type=' in url:\n",
409
- " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
410
- " if 'model' in model_type.lower():\n",
411
- " model_name = data['files'][0]['name']\n",
412
- " else:\n",
413
- " model_name = data['files'][1]['name']\n",
414
- " else:\n",
415
- " model_type = data['model']['type']\n",
416
- " model_name = data['files'][0]['name']\n",
417
- "\n",
418
- " model_name = file_name or model_name\n",
419
- "\n",
420
- " # Determine DownloadUrl\n",
421
- " if \"civitai.com/models/\" in url:\n",
422
- " if '?modelVersionId=' in url:\n",
423
- " download_url = data.get('downloadUrl')\n",
424
- " else:\n",
425
- " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
426
- " elif 'type=' in url:\n",
427
- " if any(t.lower() in model_type.lower() for t in support_types):\n",
428
- " download_url = data['files'][0]['downloadUrl']\n",
429
- " else:\n",
430
- " download_url = data['files'][1]['downloadUrl']\n",
431
- " else:\n",
432
- " download_url = data.get('downloadUrl')\n",
433
- "\n",
434
- " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
435
- "\n",
436
- " # Find a safe image: level less than 4 | Kaggle\n",
437
- " image_url, image_name = None, None\n",
438
- " if any(t in model_type for t in support_types):\n",
439
- " try:\n",
440
- " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
441
- " if env == 'Kaggle':\n",
442
- " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
443
- " else:\n",
444
- " image_url = images[0]['url'] if images else None\n",
445
- " except KeyError:\n",
446
- " pass\n",
447
- "\n",
448
- " # Generate a name to save the image\n",
449
- " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
450
- "\n",
451
- " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
452
- "\n",
453
- "''' Main Download Code '''\n",
454
- "\n",
455
- "def download(url):\n",
456
- " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
457
- "\n",
458
- " for link_or_path in links_and_paths:\n",
459
- " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
460
- " handle_manual(link_or_path)\n",
461
- " else:\n",
462
- " url, dst_dir, file_name = link_or_path.split()\n",
463
- " manual_download(url, dst_dir, file_name)\n",
464
- "\n",
465
- " unpack_zip_files()\n",
466
- "\n",
467
- "def unpack_zip_files():\n",
468
- " for directory in directories:\n",
469
- " for root, _, files in os.walk(directory):\n",
470
- " for file in files:\n",
471
- " if file.endswith(\".zip\"):\n",
472
- " zip_path = os.path.join(root, file)\n",
473
- " extract_path = os.path.splitext(zip_path)[0]\n",
474
- " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
475
- " zip_ref.extractall(extract_path)\n",
476
- " os.remove(zip_path)\n",
477
- "\n",
478
- "def handle_manual(url):\n",
479
- " url_parts = url.split(':', 1)\n",
480
- " prefix, path = url_parts[0], url_parts[1]\n",
481
- "\n",
482
- " file_name_match = re.search(r'\\[(.*?)\\]', path)\n",
483
- " file_name = file_name_match.group(1) if file_name_match else None\n",
484
- " if file_name:\n",
485
- " path = re.sub(r'\\[.*?\\]', '', path)\n",
486
- "\n",
487
- " if prefix in prefixes:\n",
488
- " dir = prefixes[prefix]\n",
489
- " if prefix != \"extension\":\n",
490
- " try:\n",
491
- " manual_download(path, dir, file_name=file_name)\n",
492
- " except Exception as e:\n",
493
- " print(f\"Error downloading file: {e}\")\n",
494
- " else:\n",
495
- " extension_repo.append((path, file_name))\n",
496
- "\n",
497
- "def manual_download(url, dst_dir, file_name):\n",
498
- " aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'\n",
499
- " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
500
- " header_option = f\"--header={user_header}\"\n",
501
- "\n",
502
- " if 'github.com' in url:\n",
503
- " url = strip_(url)\n",
504
- "\n",
505
- " # -- CivitAi APi+ V2 --\n",
506
- " elif 'civitai' in url:\n",
507
- " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
508
- "\n",
509
- " if image_url and image_name:\n",
510
- " with capture.capture_output() as cap:\n",
511
- " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
512
- " del cap\n",
513
- "\n",
514
- " elif \"huggingface.co\" in url:\n",
515
- " clean_url = strip_(url)\n",
516
- "\n",
517
- " \"\"\" Formatted info output \"\"\"\n",
518
- " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
519
- " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
520
- "\n",
521
- " # ## -- for my tests --\n",
522
- " # print(url, dst_dir, model_name_or_basename)\n",
523
- " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
524
- " if 'civitai' in url and data and image_name:\n",
525
- " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
526
- " # =====================\n",
527
- "\n",
528
- " # # -- Git Hub --\n",
529
- " if 'github.com' in url or 'githubusercontent.com' in url:\n",
530
- " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
531
- "\n",
532
- " # -- GDrive --\n",
533
- " elif 'drive.google' in url:\n",
534
- " try:\n",
535
- " have_drive_link\n",
536
- " except:\n",
537
- " !pip install -U gdown > /dev/null\n",
538
- " have_drive_link = True\n",
539
- "\n",
540
- " if 'folders' in url:\n",
541
- " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
542
- " else:\n",
543
- " if file_name:\n",
544
- " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
545
- " else:\n",
546
- " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
547
- "\n",
548
- " # -- Hugging Face --\n",
549
- " elif 'huggingface' in url:\n",
550
- " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
551
- "\n",
552
- " # -- Other --\n",
553
- " elif 'http' in url:\n",
554
- " !aria2c {aria2_args} -d {dst_dir} '{'-o' + file_name if file_name else ''}' '{url}'\n",
555
- "\n",
556
- "''' SubModels - Added URLs '''\n",
557
- "\n",
558
- "def add_submodels(selection, num_selection, model_dict, dst_dir):\n",
559
- " if selection == \"none\":\n",
560
- " return []\n",
561
- " if selection == \"ALL\":\n",
562
- " all_models = []\n",
563
- " for models in model_dict.values():\n",
564
- " all_models.extend(models)\n",
565
- " selected_models = all_models\n",
566
- " else:\n",
567
- " selected_models = model_dict[selection]\n",
568
- " selected_nums = map(int, num_selection.replace(',', '').split())\n",
569
- " for num in selected_nums:\n",
570
- " if 1 <= num <= len(model_dict):\n",
571
- " name = list(model_dict)[num - 1]\n",
572
- " selected_models.extend(model_dict[name])\n",
573
- "\n",
574
- " unique_models = list({model['name']: model for model in selected_models}.values())\n",
575
- " for model in unique_models:\n",
576
- " model['dst_dir'] = dst_dir\n",
577
- "\n",
578
- " return unique_models\n",
579
- "\n",
580
- "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n",
581
- " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n",
582
- " for submodel in submodels:\n",
583
- " if not inpainting_model and \"inpainting\" in submodel['name']:\n",
584
- " continue\n",
585
- " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n",
586
- " return url\n",
587
- "\n",
588
- "url = handle_submodels(model, model_num, model_list, models_dir, url)\n",
589
- "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n",
590
- "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n",
591
- "\n",
592
- "''' file.txt - added urls '''\n",
593
- "\n",
594
- "def process_file_download(file_url, prefixes, unique_urls):\n",
595
- " files_urls = \"\"\n",
596
- "\n",
597
- " if file_url.startswith(\"http\"):\n",
598
- " if \"blob\" in file_url:\n",
599
- " file_url = file_url.replace(\"blob\", \"raw\")\n",
600
- " response = requests.get(file_url)\n",
601
- " lines = response.text.split('\\n')\n",
602
- " else:\n",
603
- " with open(file_url, 'r') as file:\n",
604
- " lines = file.readlines()\n",
605
- "\n",
606
- " current_tag = None\n",
607
- " for line in lines:\n",
608
- " line = line.strip()\n",
609
- " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
610
- " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
611
- "\n",
612
- " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
613
- " for url in urls:\n",
614
- " filter_url = url.split('[')[0] # same url filter\n",
615
- "\n",
616
- " if url.startswith(\"http\") and filter_url not in unique_urls:\n",
617
- " files_urls += f\"{current_tag}:{url}, \"\n",
618
- " unique_urls.add(filter_url)\n",
619
- "\n",
620
- " return files_urls\n",
621
- "\n",
622
- "file_urls = \"\"\n",
623
- "unique_urls = set()\n",
624
- "\n",
625
- "if custom_file_urls:\n",
626
- " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
627
- " if not custom_file_url.endswith('.txt'):\n",
628
- " custom_file_url += '.txt'\n",
629
- " if not custom_file_url.startswith('http'):\n",
630
- " if not custom_file_url.startswith(root_path):\n",
631
- " custom_file_url = f'{root_path}/{custom_file_url}'\n",
632
- "\n",
633
- " try:\n",
634
- " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
635
- " except FileNotFoundError:\n",
636
- " pass\n",
637
- "\n",
638
- "# url prefixing\n",
639
- "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
640
- "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
641
- "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
642
- "\n",
643
- "if detailed_download == \"on\":\n",
644
- " print(\"\\n\\n\\033[33m# ====== Подробная Загрузка ====== #\\n\\033[0m\")\n",
645
- " download(url)\n",
646
- " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n",
647
- "else:\n",
648
- " with capture.capture_output() as cap:\n",
649
- " download(url)\n",
650
- " del cap\n",
651
- "\n",
652
- "print(\"\\r🏁 Скачивание Завершено!\" + \" \"*15)\n",
653
- "\n",
654
- "\n",
655
- "# Cleaning shit after downloading...\n",
656
- "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n",
657
- "\n",
658
- "\n",
659
- "## Install of Custom extensions\n",
660
- "if len(extension_repo) > 0:\n",
661
- " print(\"✨ Установка кастомных расширений...\", end='', flush=True)\n",
662
- " with capture.capture_output() as cap:\n",
663
- " for repo, repo_name in extension_repo:\n",
664
- " if not repo_name:\n",
665
- " repo_name = repo.split('/')[-1]\n",
666
- " !cd {extensions_dir} \\\n",
667
- " && git clone {repo} {repo_name} \\\n",
668
- " && cd {repo_name} \\\n",
669
- " && git fetch\n",
670
- " del cap\n",
671
- " print(f\"\\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!\")\n",
672
- "\n",
673
- "\n",
674
- "## List Models and stuff V2\n",
675
- "if detailed_download == \"off\":\n",
676
- " print(\"\\n\\n\\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.\")\n",
677
- "\n",
678
- "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result"
679
- ]
680
- }
681
- ],
682
- "metadata": {
683
- "colab": {
684
- "provenance": []
685
- },
686
- "kernelspec": {
687
- "display_name": "Python 3",
688
- "name": "python3"
689
- },
690
- "language_info": {
691
- "name": "python"
692
- }
693
- },
694
- "nbformat": 4,
695
- "nbformat_minor": 0
 
 
696
  }
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "metadata": {
7
+ "id": "2lJmbqrs3Mu8"
8
+ },
9
+ "outputs": [],
10
+ "source": [
11
+ "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
+ "\n",
13
+ "import os\n",
14
+ "import re\n",
15
+ "import time\n",
16
+ "import json\n",
17
+ "import shutil\n",
18
+ "import zipfile\n",
19
+ "import requests\n",
20
+ "import subprocess\n",
21
+ "from datetime import timedelta\n",
22
+ "from subprocess import getoutput\n",
23
+ "from IPython.utils import capture\n",
24
+ "from IPython.display import clear_output\n",
25
+ "from urllib.parse import urlparse, parse_qs\n",
26
+ "\n",
27
+ "\n",
28
+ "# ================= DETECT ENV =================\n",
29
+ "def detect_environment():\n",
30
+ " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
+ " environments = {\n",
32
+ " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
+ " }\n",
35
+ " for env_var, (environment, path) in environments.items():\n",
36
+ " if env_var in os.environ:\n",
37
+ " return environment, path, free_plan\n",
38
+ "\n",
39
+ "env, root_path, free_plan = detect_environment()\n",
40
+ "webui_path = f\"{root_path}/sdw\"\n",
41
+ "\n",
42
+ "\n",
43
+ "# ================ LIBRARIES V2 ================\n",
44
+ "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
45
+ "\n",
46
+ "if not os.path.exists(flag_file):\n",
47
+ " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n",
48
+ "\n",
49
+ " install_lib = {\n",
50
+ " # \"aria2\": \"apt -y install aria2\",\n",
51
+ " \"aria2\": \"pip install aria2\",\n",
52
+ " \"localtunnel\": \"npm install -g localtunnel\",\n",
53
+ " \"insightface\": \"pip install insightface\"\n",
54
+ " }\n",
55
+ "\n",
56
+ " additional_libs = {\n",
57
+ " \"Google Colab\": {\n",
58
+ " \"xformers\": \"pip install xformers==0.0.27 --no-deps\"\n",
59
+ " },\n",
60
+ " \"Kaggle\": {\n",
61
+ " \"xformers\": \"pip install xformers==0.0.26.post1\",\n",
62
+ " # \"torch\": \"pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\",\n",
63
+ " # \"aiohttp\": \"pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*\" # fix install req\n",
64
+ " }\n",
65
+ " }\n",
66
+ "\n",
67
+ " if env in additional_libs:\n",
68
+ " install_lib.update(additional_libs[env])\n",
69
+ "\n",
70
+ " # Loop through libraries\n",
71
+ " for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):\n",
72
+ " print(f\"\\r[{index}/{len(install_lib)}] \\033[32m>>\\033[0m Installing \\033[33m{package}\\033[0m...\" + \" \"*35, end='')\n",
73
+ " subprocess.run(install_cmd, shell=True, capture_output=True)\n",
74
+ "\n",
75
+ " # Additional specific packages\n",
76
+ " with capture.capture_output() as cap:\n",
77
+ " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
78
+ " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
79
+ " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n",
80
+ " del cap\n",
81
+ "\n",
82
+ " clear_output()\n",
83
+ "\n",
84
+ " # Save file install lib\n",
85
+ " with open(flag_file, \"w\") as f:\n",
86
+ " f.write(\">W<'\")\n",
87
+ "\n",
88
+ " print(\"🍪 Библиотеки установлены!\" + \" \"*35)\n",
89
+ " time.sleep(2)\n",
90
+ " clear_output()\n",
91
+ "\n",
92
+ "\n",
93
+ "# ================= loading settings V4 =================\n",
94
+ "def load_settings(path):\n",
95
+ " if os.path.exists(path):\n",
96
+ " with open(path, 'r') as file:\n",
97
+ " return json.load(file)\n",
98
+ " return {}\n",
99
+ "\n",
100
+ "settings = load_settings(f'{root_path}/settings.json')\n",
101
+ "\n",
102
+ "VARIABLES = [\n",
103
+ " 'model', 'model_num', 'inpainting_model',\n",
104
+ " 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',\n",
105
+ " 'change_webui', 'detailed_download', 'controlnet',\n",
106
+ " 'controlnet_num', 'commit_hash', 'huggingface_token',\n",
107
+ " 'ngrok_token', 'zrok_token', 'commandline_arguments',\n",
108
+ " 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',\n",
109
+ " 'Extensions_url', 'custom_file_urls'\n",
110
+ "]\n",
111
+ "\n",
112
+ "locals().update({key: settings.get(key) for key in VARIABLES})\n",
113
+ "\n",
114
+ "\n",
115
+ "# ================= OTHER =================\n",
116
+ "try:\n",
117
+ " start_colab\n",
118
+ "except:\n",
119
+ " start_colab = int(time.time())-5\n",
120
+ "\n",
121
+ "# CONFIG DIR\n",
122
+ "models_dir = f\"{webui_path}/models/Stable-diffusion\"\n",
123
+ "vaes_dir = f\"{webui_path}/models/VAE\"\n",
124
+ "embeddings_dir = f\"{webui_path}/embeddings\"\n",
125
+ "loras_dir = f\"{webui_path}/models/Lora\"\n",
126
+ "extensions_dir = f\"{webui_path}/extensions\"\n",
127
+ "control_dir = f\"{webui_path}/models/ControlNet\"\n",
128
+ "adetailer_dir = f\"{webui_path}/models/adetailer\"\n",
129
+ "\n",
130
+ "\n",
131
+ "# ================= MAIN CODE =================\n",
132
+ "if not os.path.exists(webui_path):\n",
133
+ " start_install = int(time.time())\n",
134
+ " print(\"⌚ Распаковка Stable Diffusion...\" if change_webui != 'Forge' else \"⌚ Распаковка Stable Diffusion (Forge)...\", end='')\n",
135
+ " with capture.capture_output() as cap:\n",
136
+ " aria2_command = \"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M\"\n",
137
+ " url = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip\" if change_webui != 'Forge' else \"https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip\"\n",
138
+ " !{aria2_command} {url} -o repo.zip\n",
139
+ "\n",
140
+ " !unzip -q -o repo.zip -d {webui_path}\n",
141
+ " !rm -rf repo.zip\n",
142
+ "\n",
143
+ " %cd {root_path}\n",
144
+ " os.environ[\"SAFETENSORS_FAST_GPU\"]='1'\n",
145
+ " os.environ[\"CUDA_MODULE_LOADING\"]=\"LAZY\"\n",
146
+ " os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"3\"\n",
147
+ " os.environ[\"PYTHONWARNINGS\"] = \"ignore\"\n",
148
+ "\n",
149
+ " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
150
+ " del cap\n",
151
+ " install_time = timedelta(seconds=time.time()-start_install)\n",
152
+ " print(\"\\r🚀 Распаковка Завершена! За\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
153
+ "else:\n",
154
+ " print(\"🚀 Все распакованно... Пропуск. ⚡\")\n",
155
+ " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
156
+ " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
157
+ " print(f\"⌚️ Вы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\")\n",
158
+ "\n",
159
+ "\n",
160
+ "## Changes extensions and WebUi\n",
161
+ "if latest_webui or latest_exstensions:\n",
162
+ " action = \"Обновление WebUI и Расширений\" if latest_webui and latest_exstensions else (\"Обновление WebUI\" if latest_webui else \"Обновление Расширений\")\n",
163
+ " print(f\"⌚️ {action}...\", end='', flush=True)\n",
164
+ " with capture.capture_output() as cap:\n",
165
+ " !git config --global user.email \"[email protected]\"\n",
166
+ " !git config --global user.name \"Your Name\"\n",
167
+ "\n",
168
+ " ## Update Webui\n",
169
+ " if latest_webui:\n",
170
+ " %cd {webui_path}\n",
171
+ " !git restore .\n",
172
+ " !git pull -X theirs --rebase --autostash\n",
173
+ "\n",
174
+ " ## Update extensions\n",
175
+ " if latest_exstensions:\n",
176
+ " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
177
+ " del cap\n",
178
+ " print(f\"\\r✨ {action} Завершено!\")\n",
179
+ "\n",
180
+ "\n",
181
+ "# === FIXING EXTENSIONS ===\n",
182
+ "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
183
+ "\n",
184
+ "with capture.capture_output() as cap:\n",
185
+ " # --- Umi-Wildcard ---\n",
186
+ " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
187
+ "\n",
188
+ " # --- Encrypt-Image ---\n",
189
+ " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
190
+ "\n",
191
+ " # --- Additional-Networks ---\n",
192
+ " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
193
+ "del cap\n",
194
+ "\n",
195
+ "\n",
196
+ "## Version switching\n",
197
+ "if commit_hash:\n",
198
+ " print('⏳ Активация машины времени...', end=\"\", flush=True)\n",
199
+ " with capture.capture_output() as cap:\n",
200
+ " %cd {webui_path}\n",
201
+ " !git config --global user.email \"[email protected]\"\n",
202
+ " !git config --global user.name \"Your Name\"\n",
203
+ " !git reset --hard {commit_hash}\n",
204
+ " del cap\n",
205
+ " print(f\"\\r⌛️ Машина времени активированна! Текущий коммит: \\033[34m{commit_hash}\\033[0m\")\n",
206
+ "\n",
207
+ "\n",
208
+ "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
209
+ "print(\"📦 Скачивание моделей и прочего...\", end='')\n",
210
+ "model_list = {\n",
211
+ " \"1.Anime (by XpucT) + INP\": [\n",
212
+ " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
213
+ " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
214
+ " ],\n",
215
+ " \"2.BluMix [Anime] [V7] + INP\": [\n",
216
+ " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
217
+ " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
218
+ " ],\n",
219
+ " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
220
+ " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
221
+ " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
222
+ " ],\n",
223
+ " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
224
+ " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
225
+ " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
226
+ " ],\n",
227
+ " \"5.CuteColor [Anime] [V3]\": [\n",
228
+ " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
229
+ " ],\n",
230
+ " \"6.Dark-Sushi-Mix [Anime]\": [\n",
231
+ " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
232
+ " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
233
+ " ],\n",
234
+ " \"7.Deliberate [Realism] [V6] + INP\": [\n",
235
+ " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
236
+ " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
237
+ " ],\n",
238
+ " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
239
+ " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
240
+ " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
241
+ " ],\n",
242
+ " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
243
+ " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
244
+ " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
245
+ " ]\n",
246
+ "}\n",
247
+ "\n",
248
+ "vae_list = {\n",
249
+ " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"Anime.vae.safetensors\"}],\n",
250
+ " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
251
+ " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
252
+ " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
253
+ " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
254
+ "}\n",
255
+ "\n",
256
+ "controlnet_list = {\n",
257
+ " \"1.canny\": [\n",
258
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
259
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
260
+ " ],\n",
261
+ " \"2.openpose\": [\n",
262
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
263
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
264
+ " ],\n",
265
+ " \"3.depth\": [\n",
266
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
267
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
268
+ " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
269
+ " ],\n",
270
+ " \"4.normal_map\": [\n",
271
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
272
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
273
+ " ],\n",
274
+ " \"5.mlsd\": [\n",
275
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
276
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
277
+ " ],\n",
278
+ " \"6.lineart\": [\n",
279
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
280
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
281
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
282
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
283
+ " ],\n",
284
+ " \"7.soft_edge\": [\n",
285
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
286
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
287
+ " ],\n",
288
+ " \"8.scribble\": [\n",
289
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
290
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
291
+ " ],\n",
292
+ " \"9.segmentation\": [\n",
293
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
294
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
295
+ " ],\n",
296
+ " \"10.shuffle\": [\n",
297
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
298
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
299
+ " ],\n",
300
+ " \"11.tile\": [\n",
301
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
302
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
303
+ " ],\n",
304
+ " \"12.inpaint\": [\n",
305
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
306
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
307
+ " ],\n",
308
+ " \"13.instruct_p2p\": [\n",
309
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
310
+ " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
311
+ " ]\n",
312
+ "}\n",
313
+ "\n",
314
+ "url = \"\"\n",
315
+ "prefixes = {\n",
316
+ " \"model\": models_dir,\n",
317
+ " \"vae\": vaes_dir,\n",
318
+ " \"lora\": loras_dir,\n",
319
+ " \"embed\": embeddings_dir,\n",
320
+ " \"extension\": extensions_dir,\n",
321
+ " \"control\": control_dir,\n",
322
+ " \"adetailer\": adetailer_dir,\n",
323
+ " \"config\": webui_path\n",
324
+ "}\n",
325
+ "\n",
326
+ "extension_repo = []\n",
327
+ "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
328
+ "!mkdir -p {\" \".join(directories)}\n",
329
+ "\n",
330
+ "hf_token = huggingface_token if huggingface_token else \"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\"\n",
331
+ "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
332
+ "\n",
333
+ "''' Formatted Info Output '''\n",
334
+ "\n",
335
+ "from math import floor\n",
336
+ "\n",
337
+ "def center_text(text, terminal_width=45):\n",
338
+ " text_length = len(text)\n",
339
+ " left_padding = floor((terminal_width - text_length) / 2)\n",
340
+ " right_padding = terminal_width - text_length - left_padding\n",
341
+ " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
342
+ "\n",
343
+ "def format_output(url, dst_dir, file_name):\n",
344
+ " info = f\"[{file_name.split('.')[0]}]\"\n",
345
+ " info = center_text(info)\n",
346
+ "\n",
347
+ " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
348
+ " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
349
+ " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
350
+ " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
351
+ "\n",
352
+ "''' GET CivitAi API - DATA '''\n",
353
+ "\n",
354
+ "def strip_(url, file_name=None):\n",
355
+ " if 'github.com' in url:\n",
356
+ " if '/blob/' in url:\n",
357
+ " url = url.replace('/blob/', '/raw/')\n",
358
+ "\n",
359
+ " elif \"civitai.com\" in url:\n",
360
+ " return CivitAi_API(url, file_name)\n",
361
+ "\n",
362
+ " elif \"huggingface.co\" in url:\n",
363
+ " if '/blob/' in url:\n",
364
+ " url = url.replace('/blob/', '/resolve/')\n",
365
+ " if '?' in url:\n",
366
+ " url = url.split('?')[0]\n",
367
+ "\n",
368
+ " return url\n",
369
+ "\n",
370
+ "def CivitAi_API(url, file_name=None):\n",
371
+ " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
372
+ " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
373
+ "\n",
374
+ " if '?token=' in url:\n",
375
+ " url = url.split('?token=')[0]\n",
376
+ " if '?type=' in url:\n",
377
+ " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
378
+ " else:\n",
379
+ " url = f\"{url}?token={civitai_token}\"\n",
380
+ "\n",
381
+ " # Determine model or version id\n",
382
+ " if \"civitai.com/models/\" in url:\n",
383
+ " if '?modelVersionId=' in url:\n",
384
+ " version_id = url.split('?modelVersionId=')[1]\n",
385
+ " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
+ " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
387
+ " else:\n",
388
+ " model_id = url.split('/models/')[1].split('/')[0]\n",
389
+ " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
390
+ " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
391
+ " else:\n",
392
+ " version_id = url.split('/models/')[1].split('/')[0]\n",
393
+ " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
+ " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
395
+ "\n",
396
+ " data = response.json()\n",
397
+ "\n",
398
+ " if response.status_code != 200:\n",
399
+ " return None, None, None, None, None, None, None\n",
400
+ "\n",
401
+ " # Define model type and name\n",
402
+ " if \"civitai.com/models/\" in url:\n",
403
+ " if '?modelVersionId=' in url:\n",
404
+ " model_type = data['model']['type']\n",
405
+ " model_name = data['files'][0]['name']\n",
406
+ " else:\n",
407
+ " model_type = data['type']\n",
408
+ " model_name = data['modelVersions'][0]['files'][0]['name']\n",
409
+ " elif 'type=' in url:\n",
410
+ " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
411
+ " if 'model' in model_type.lower():\n",
412
+ " model_name = data['files'][0]['name']\n",
413
+ " else:\n",
414
+ " model_name = data['files'][1]['name']\n",
415
+ " else:\n",
416
+ " model_type = data['model']['type']\n",
417
+ " model_name = data['files'][0]['name']\n",
418
+ "\n",
419
+ " model_name = file_name or model_name\n",
420
+ "\n",
421
+ " # Determine DownloadUrl\n",
422
+ " if \"civitai.com/models/\" in url:\n",
423
+ " if '?modelVersionId=' in url:\n",
424
+ " download_url = data.get('downloadUrl')\n",
425
+ " else:\n",
426
+ " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
427
+ " elif 'type=' in url:\n",
428
+ " if any(t.lower() in model_type.lower() for t in support_types):\n",
429
+ " download_url = data['files'][0]['downloadUrl']\n",
430
+ " else:\n",
431
+ " download_url = data['files'][1]['downloadUrl']\n",
432
+ " else:\n",
433
+ " download_url = data.get('downloadUrl')\n",
434
+ "\n",
435
+ " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
436
+ "\n",
437
+ " # Find a safe image: level less than 4 | Kaggle\n",
438
+ " image_url, image_name = None, None\n",
439
+ " if any(t in model_type for t in support_types):\n",
440
+ " try:\n",
441
+ " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
442
+ " if env == 'Kaggle':\n",
443
+ " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
444
+ " else:\n",
445
+ " image_url = images[0]['url'] if images else None\n",
446
+ " except KeyError:\n",
447
+ " pass\n",
448
+ "\n",
449
+ " # Generate a name to save the image\n",
450
+ " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
451
+ "\n",
452
+ " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
453
+ "\n",
454
+ "''' Main Download Code '''\n",
455
+ "\n",
456
+ "def download(url):\n",
457
+ " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
458
+ "\n",
459
+ " for link_or_path in links_and_paths:\n",
460
+ " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
461
+ " handle_manual(link_or_path)\n",
462
+ " else:\n",
463
+ " url, dst_dir, file_name = link_or_path.split()\n",
464
+ " manual_download(url, dst_dir, file_name)\n",
465
+ "\n",
466
+ " unpack_zip_files()\n",
467
+ "\n",
468
+ "def unpack_zip_files():\n",
469
+ " for directory in directories:\n",
470
+ " for root, _, files in os.walk(directory):\n",
471
+ " for file in files:\n",
472
+ " if file.endswith(\".zip\"):\n",
473
+ " zip_path = os.path.join(root, file)\n",
474
+ " extract_path = os.path.splitext(zip_path)[0]\n",
475
+ " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
476
+ " zip_ref.extractall(extract_path)\n",
477
+ " os.remove(zip_path)\n",
478
+ "\n",
479
+ "def handle_manual(url):\n",
480
+ " url_parts = url.split(':', 1)\n",
481
+ " prefix, path = url_parts[0], url_parts[1]\n",
482
+ "\n",
483
+ " file_name_match = re.search(r'\\[(.*?)\\]', path)\n",
484
+ " file_name = file_name_match.group(1) if file_name_match else None\n",
485
+ " if file_name:\n",
486
+ " path = re.sub(r'\\[.*?\\]', '', path)\n",
487
+ "\n",
488
+ " if prefix in prefixes:\n",
489
+ " dir = prefixes[prefix]\n",
490
+ " if prefix != \"extension\":\n",
491
+ " try:\n",
492
+ " manual_download(path, dir, file_name=file_name)\n",
493
+ " except Exception as e:\n",
494
+ " print(f\"Error downloading file: {e}\")\n",
495
+ " else:\n",
496
+ " extension_repo.append((path, file_name))\n",
497
+ "\n",
498
+ "def manual_download(url, dst_dir, file_name):\n",
499
+ " header_option = f\"--header={user_header}\"\n",
500
+ " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
501
+ " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
502
+ "\n",
503
+ " if 'github.com' in url:\n",
504
+ " url = strip_(url)\n",
505
+ "\n",
506
+ " # -- CivitAi APi+ V2 --\n",
507
+ " elif 'civitai' in url:\n",
508
+ " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
509
+ "\n",
510
+ " if image_url and image_name:\n",
511
+ " with capture.capture_output() as cap:\n",
512
+ " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
513
+ " del cap\n",
514
+ "\n",
515
+ " elif \"huggingface.co\" in url:\n",
516
+ " clean_url = strip_(url)\n",
517
+ " basename = clean_url.split(\"/\")[-1] if file_name is None else file_name\n",
518
+ "\n",
519
+ " \"\"\" Formatted info output \"\"\"\n",
520
+ " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
521
+ " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
522
+ "\n",
523
+ " # ## -- for my tests --\n",
524
+ " # print(url, dst_dir, model_name_or_basename)\n",
525
+ " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
526
+ " if 'civitai' in url and data and image_name:\n",
527
+ " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
528
+ " # =====================\n",
529
+ "\n",
530
+ " # -- Git Hub --\n",
531
+ " if 'github.com' in url or 'githubusercontent.com' in url:\n",
532
+ " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
533
+ "\n",
534
+ " # -- GDrive --\n",
535
+ " elif 'drive.google' in url:\n",
536
+ " try:\n",
537
+ " have_drive_link\n",
538
+ " except:\n",
539
+ " !pip install -q gdown==5.2.0 > /dev/null\n",
540
+ " have_drive_link = True\n",
541
+ "\n",
542
+ " if 'folders' in url:\n",
543
+ " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
544
+ " else:\n",
545
+ " if file_name:\n",
546
+ " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
547
+ " else:\n",
548
+ " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
549
+ "\n",
550
+ " # -- Hugging Face --\n",
551
+ " elif 'huggingface' in url:\n",
552
+ " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
553
+ "\n",
554
+ " # -- Other --\n",
555
+ " elif 'http' in url:\n",
556
+ " !aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o \"{file_name if file_name else ''}\" '{url}'\n",
557
+ "\n",
558
+ "''' SubModels - Added URLs '''\n",
559
+ "\n",
560
+ "def add_submodels(selection, num_selection, model_dict, dst_dir):\n",
561
+ " if selection == \"none\":\n",
562
+ " return []\n",
563
+ " if selection == \"ALL\":\n",
564
+ " all_models = []\n",
565
+ " for models in model_dict.values():\n",
566
+ " all_models.extend(models)\n",
567
+ " selected_models = all_models\n",
568
+ " else:\n",
569
+ " selected_models = model_dict[selection]\n",
570
+ " selected_nums = map(int, num_selection.replace(',', '').split())\n",
571
+ " for num in selected_nums:\n",
572
+ " if 1 <= num <= len(model_dict):\n",
573
+ " name = list(model_dict)[num - 1]\n",
574
+ " selected_models.extend(model_dict[name])\n",
575
+ "\n",
576
+ " unique_models = list({model['name']: model for model in selected_models}.values())\n",
577
+ " for model in unique_models:\n",
578
+ " model['dst_dir'] = dst_dir\n",
579
+ "\n",
580
+ " return unique_models\n",
581
+ "\n",
582
+ "def handle_submodels(selection, num_selection, model_dict, dst_dir, url):\n",
583
+ " submodels = add_submodels(selection, num_selection, model_dict, dst_dir)\n",
584
+ " for submodel in submodels:\n",
585
+ " if not inpainting_model and \"inpainting\" in submodel['name']:\n",
586
+ " continue\n",
587
+ " url += f\"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, \"\n",
588
+ " return url\n",
589
+ "\n",
590
+ "url = handle_submodels(model, model_num, model_list, models_dir, url)\n",
591
+ "url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)\n",
592
+ "url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)\n",
593
+ "\n",
594
+ "''' file.txt - added urls '''\n",
595
+ "\n",
596
+ "def process_file_download(file_url, prefixes, unique_urls):\n",
597
+ " files_urls = \"\"\n",
598
+ "\n",
599
+ " if file_url.startswith(\"http\"):\n",
600
+ " if \"blob\" in file_url:\n",
601
+ " file_url = file_url.replace(\"blob\", \"raw\")\n",
602
+ " response = requests.get(file_url)\n",
603
+ " lines = response.text.split('\\n')\n",
604
+ " else:\n",
605
+ " with open(file_url, 'r') as file:\n",
606
+ " lines = file.readlines()\n",
607
+ "\n",
608
+ " current_tag = None\n",
609
+ " for line in lines:\n",
610
+ " line = line.strip()\n",
611
+ " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
612
+ " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
613
+ "\n",
614
+ " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
615
+ " for url in urls:\n",
616
+ " filter_url = url.split('[')[0] # same url filter\n",
617
+ "\n",
618
+ " if url.startswith(\"http\") and filter_url not in unique_urls:\n",
619
+ " files_urls += f\"{current_tag}:{url}, \"\n",
620
+ " unique_urls.add(filter_url)\n",
621
+ "\n",
622
+ " return files_urls\n",
623
+ "\n",
624
+ "file_urls = \"\"\n",
625
+ "unique_urls = set()\n",
626
+ "\n",
627
+ "if custom_file_urls:\n",
628
+ " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
629
+ " if not custom_file_url.endswith('.txt'):\n",
630
+ " custom_file_url += '.txt'\n",
631
+ " if not custom_file_url.startswith('http'):\n",
632
+ " if not custom_file_url.startswith(root_path):\n",
633
+ " custom_file_url = f'{root_path}/{custom_file_url}'\n",
634
+ "\n",
635
+ " try:\n",
636
+ " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
637
+ " except FileNotFoundError:\n",
638
+ " pass\n",
639
+ "\n",
640
+ "# url prefixing\n",
641
+ "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
642
+ "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
643
+ "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
644
+ "\n",
645
+ "if detailed_download == \"on\":\n",
646
+ " print(\"\\n\\n\\033[33m# ====== Подробная Загрузка ====== #\\n\\033[0m\")\n",
647
+ " download(url)\n",
648
+ " print(\"\\n\\033[33m# =============================== #\\n\\033[0m\")\n",
649
+ "else:\n",
650
+ " with capture.capture_output() as cap:\n",
651
+ " download(url)\n",
652
+ " del cap\n",
653
+ "\n",
654
+ "print(\"\\r🏁 Скачивание Завершено!\" + \" \"*15)\n",
655
+ "\n",
656
+ "\n",
657
+ "# Cleaning shit after downloading...\n",
658
+ "!find {webui_path} \\( -type d \\( -name \".ipynb_checkpoints\" -o -name \".aria2\" \\) -o -type f -name \"*.aria2\" \\) -exec rm -r {{}} \\; >/dev/null 2>&1\n",
659
+ "\n",
660
+ "\n",
661
+ "## Install of Custom extensions\n",
662
+ "if len(extension_repo) > 0:\n",
663
+ " print(\"✨ Установка кастомных расширений...\", end='', flush=True)\n",
664
+ " with capture.capture_output() as cap:\n",
665
+ " for repo, repo_name in extension_repo:\n",
666
+ " if not repo_name:\n",
667
+ " repo_name = repo.split('/')[-1]\n",
668
+ " !cd {extensions_dir} \\\n",
669
+ " && git clone {repo} {repo_name} \\\n",
670
+ " && cd {repo_name} \\\n",
671
+ " && git fetch\n",
672
+ " del cap\n",
673
+ " print(f\"\\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!\")\n",
674
+ "\n",
675
+ "\n",
676
+ "## List Models and stuff V2\n",
677
+ "if detailed_download == \"off\":\n",
678
+ " print(\"\\n\\n\\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.\")\n",
679
+ "\n",
680
+ "%run {root_path}/file_cell/special/dl_display_results.py # display widgets result"
681
+ ]
682
+ }
683
+ ],
684
+ "metadata": {
685
+ "colab": {
686
+ "provenance": []
687
+ },
688
+ "kernelspec": {
689
+ "display_name": "Python 3",
690
+ "name": "python3"
691
+ },
692
+ "language_info": {
693
+ "name": "python"
694
+ }
695
+ },
696
+ "nbformat": 4,
697
+ "nbformat_minor": 0
698
  }
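A note on the job-string format used by the downloading cell above: download() consumes one comma-separated string in which each entry is either a tagged link of the form prefix:URL[optional_filename] (routed by handle_manual() through the prefixes dict) or a bare "URL dst_dir filename" triple (the form handle_submodels() appends for the preset models). The sketch below only illustrates that format; the URLs, repository names and the /content/sdw path are hypothetical placeholders, not part of this commit.

# Hypothetical job string for download() (illustration only; assumes the directory layout defined above).
example_jobs = (
    "model:https://example.com/MyModel.safetensors[MyModel.safetensors], "                       # tagged link with a [filename] override
    "vae:https://huggingface.co/someuser/somerepo/resolve/main/MyVae.vae.safetensors, "          # tagged link; filename falls back to the URL basename
    "extension:https://github.com/someuser/some-extension, "                                     # collected into extension_repo and cloned later
    "https://example.com/Extra.safetensors /content/sdw/models/Stable-diffusion Extra.safetensors"  # bare "URL dst_dir filename" triple
)
# download(example_jobs)  # each entry is saved into the directory mapped to its prefix (or into the given dst_dir)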
files_cells/notebooks/ru/launch_ru.ipynb CHANGED
@@ -1,145 +1,145 @@
1
- {
2
- "nbformat": 4,
3
- "nbformat_minor": 0,
4
- "metadata": {
5
- "colab": {
6
- "provenance": []
7
- },
8
- "kernelspec": {
9
- "name": "python3",
10
- "display_name": "Python 3"
11
- },
12
- "language_info": {
13
- "name": "python"
14
- }
15
- },
16
- "cells": [
17
- {
18
- "cell_type": "code",
19
- "execution_count": null,
20
- "metadata": {
21
- "id": "JKTCrY9LU7Oq"
22
- },
23
- "outputs": [],
24
- "source": [
25
- "##~ LAUNCH CODE | BY: ANXETY ~##\n",
26
- "\n",
27
- "import os\n",
28
- "import re\n",
29
- "import time\n",
30
- "import json\n",
31
- "import requests\n",
32
- "import cloudpickle as pickle\n",
33
- "from datetime import timedelta\n",
34
- "from IPython.display import clear_output\n",
35
- "\n",
36
- "# ================= DETECT ENV =================\n",
37
- "def detect_environment():\n",
38
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
- " environments = {\n",
40
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
- " }\n",
43
- "\n",
44
- " for env_var, (environment, path) in environments.items():\n",
45
- " if env_var in os.environ:\n",
46
- " return environment, path, free_plan\n",
47
- " return 'Unknown', '/unknown/path', free_plan\n",
48
- "\n",
49
- "env, root_path, free_plan = detect_environment()\n",
50
- "webui_path = f\"{root_path}/sdw\"\n",
51
- "\n",
52
- "def load_settings():\n",
53
- " SETTINGS_FILE = f'{root_path}/settings.json'\n",
54
- " if os.path.exists(SETTINGS_FILE):\n",
55
- " with open(SETTINGS_FILE, 'r') as f:\n",
56
- " return json.load(f)\n",
57
- " return {}\n",
58
- "\n",
59
- "settings = load_settings()\n",
60
- "ngrok_token = settings.get('ngrok_token', \"\")\n",
61
- "zrok_token = settings.get('zrok_token', \"\")\n",
62
- "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
- "change_webui = settings.get('change_webui', \"\")\n",
64
- "\n",
65
- "# ======================== TUNNEL V2 ========================\n",
66
- "print('Please Wait...')\n",
67
- "\n",
68
- "def get_public_ip(version='ipv4'):\n",
69
- " try:\n",
70
- " url = f'https://api64.ipify.org?format=json&{version}=true'\n",
71
- " response = requests.get(url)\n",
72
- " return response.json().get('ip', 'N/A')\n",
73
- " except Exception as e:\n",
74
- " print(f\"Error getting public {version} address:\", e)\n",
75
- "\n",
76
- "# Check if public IP is already saved, if not then get it\n",
77
- "public_ip_file = f\"{root_path}/public_ip.txt\"\n",
78
- "if os.path.exists(public_ip_file):\n",
79
- " with open(public_ip_file, 'r') as file:\n",
80
- " public_ipv4 = file.read().strip()\n",
81
- "else:\n",
82
- " public_ipv4 = get_public_ip(version='ipv4')\n",
83
- " with open(public_ip_file, 'w') as file:\n",
84
- " file.write(public_ipv4)\n",
85
- "\n",
86
- "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
87
- "tunnel_port = 1834\n",
88
- "tunnel = tunnel_class(tunnel_port)\n",
89
- "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
90
- "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
91
- "\n",
92
- "if zrok_token:\n",
93
- " !zrok enable {zrok_token} &> /dev/null\n",
94
- " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
95
- "\n",
96
- "clear_output()\n",
97
- "\n",
98
- "# =============== Automatic Fixing Path V3 ===============\n",
99
- "paths_to_check = {\n",
100
- " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
101
- " \"additional_networks_extra_lora_path\": f\"{webui_path}/models/Lora/\",\n",
102
- " \"ad_extra_models_dir\": f\"{webui_path}/models/adetailer/\",\n",
103
- " \"sd_checkpoint_hash\": \"\",\n",
104
- " \"sd_model_checkpoint\": \"\",\n",
105
- " \"sd_vae\": \"None\"\n",
106
- "}\n",
107
- "\n",
108
- "config_path = f'{webui_path}/ui-config.json'\n",
109
- "\n",
110
- "if os.path.exists(config_path):\n",
111
- " with open(config_path, 'r') as file:\n",
112
- " config_data = json.load(file)\n",
113
- "\n",
114
- " for key, value in paths_to_check.items():\n",
115
- " if key in config_data and config_data[key] != value:\n",
116
- " sed_command = f\"sed -i 's|\\\"{key}\\\": \\\".*\\\"|\\\"{key}\\\": \\\"{value}\\\"|' {config_path}\"\n",
117
- " os.system(sed_command)\n",
118
- "\n",
119
- " if env == 'Kaggle':\n",
120
- " get_ipython().system('sed -i \\'s/\"civitai_interface\\\\/NSFW content\\\\/value\":.*/\"civitai_interface\\\\/NSFW content\\\\/value\": false/g\\' {webui_path}/ui-config.json')\n",
121
- "\n",
122
- "with tunnel:\n",
123
- " %cd {webui_path}\n",
124
- "\n",
125
- " commandline_arguments += f' --port={tunnel_port}'\n",
126
- " if ngrok_token:\n",
127
- " commandline_arguments += f' --ngrok {ngrok_token}'\n",
128
- " if env != \"Google Colab\":\n",
129
- " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
130
- "\n",
131
- " if change_webui == 'Forge':\n",
132
- " commandline_arguments += ' --cuda-stream --pin-shared-memory'\n",
133
- "\n",
134
- " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
135
- "\n",
136
- "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
137
- "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
138
- "print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
139
- "\n",
140
- "if zrok_token:\n",
141
- " !zrok disable &> /dev/null"
142
- ]
143
- }
144
- ]
145
  }
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": []
7
+ },
8
+ "kernelspec": {
9
+ "name": "python3",
10
+ "display_name": "Python 3"
11
+ },
12
+ "language_info": {
13
+ "name": "python"
14
+ }
15
+ },
16
+ "cells": [
17
+ {
18
+ "cell_type": "code",
19
+ "execution_count": null,
20
+ "metadata": {
21
+ "id": "JKTCrY9LU7Oq"
22
+ },
23
+ "outputs": [],
24
+ "source": [
25
+ "##~ LAUNCH CODE | BY: ANXETY ~##\n",
26
+ "\n",
27
+ "import os\n",
28
+ "import re\n",
29
+ "import time\n",
30
+ "import json\n",
31
+ "import requests\n",
32
+ "import cloudpickle as pickle\n",
33
+ "from datetime import timedelta\n",
34
+ "from IPython.display import clear_output\n",
35
+ "\n",
36
+ "# ================= DETECT ENV =================\n",
37
+ "def detect_environment():\n",
38
+ " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
+ " environments = {\n",
40
+ " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
+ " }\n",
43
+ "\n",
44
+ " for env_var, (environment, path) in environments.items():\n",
45
+ " if env_var in os.environ:\n",
46
+ " return environment, path, free_plan\n",
47
+ " return 'Unknown', '/unknown/path', free_plan\n",
48
+ "\n",
49
+ "env, root_path, free_plan = detect_environment()\n",
50
+ "webui_path = f\"{root_path}/sdw\"\n",
51
+ "\n",
52
+ "def load_settings():\n",
53
+ " SETTINGS_FILE = f'{root_path}/settings.json'\n",
54
+ " if os.path.exists(SETTINGS_FILE):\n",
55
+ " with open(SETTINGS_FILE, 'r') as f:\n",
56
+ " return json.load(f)\n",
57
+ " return {}\n",
58
+ "\n",
59
+ "settings = load_settings()\n",
60
+ "ngrok_token = settings.get('ngrok_token', \"\")\n",
61
+ "zrok_token = settings.get('zrok_token', \"\")\n",
62
+ "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
+ "change_webui = settings.get('change_webui', \"\")\n",
64
+ "\n",
65
+ "# ======================== TUNNEL V2 ========================\n",
66
+ "print('Please Wait...')\n",
67
+ "\n",
68
+ "def get_public_ip(version='ipv4'):\n",
69
+ " try:\n",
70
+ " url = f'https://api64.ipify.org?format=json&{version}=true'\n",
71
+ " response = requests.get(url)\n",
72
+ " return response.json().get('ip', 'N/A')\n",
73
+ " except Exception as e:\n",
74
+ " print(f\"Error getting public {version} address:\", e)\n",
75
+ "\n",
76
+ "# Check if public IP is already saved, if not then get it\n",
77
+ "public_ip_file = f\"{root_path}/public_ip.txt\"\n",
78
+ "if os.path.exists(public_ip_file):\n",
79
+ " with open(public_ip_file, 'r') as file:\n",
80
+ " public_ipv4 = file.read().strip()\n",
81
+ "else:\n",
82
+ " public_ipv4 = get_public_ip(version='ipv4')\n",
83
+ " with open(public_ip_file, 'w') as file:\n",
84
+ " file.write(public_ipv4)\n",
85
+ "\n",
86
+ "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
87
+ "tunnel_port = 1834\n",
88
+ "tunnel = tunnel_class(tunnel_port)\n",
89
+ "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
90
+ "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
91
+ "\n",
92
+ "if zrok_token:\n",
93
+ " !zrok enable {zrok_token} &> /dev/null\n",
94
+ " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
95
+ "\n",
96
+ "clear_output()\n",
97
+ "\n",
98
+ "# =============== Automatic Fixing Path V3 ===============\n",
99
+ "paths_to_check = {\n",
100
+ " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
101
+ " \"additional_networks_extra_lora_path\": f\"{webui_path}/models/Lora/\",\n",
102
+ " \"ad_extra_models_dir\": f\"{webui_path}/models/adetailer/\",\n",
103
+ " \"sd_checkpoint_hash\": \"\",\n",
104
+ " \"sd_model_checkpoint\": \"\",\n",
105
+ " \"sd_vae\": \"None\"\n",
106
+ "}\n",
107
+ "\n",
108
+ "config_path = f'{webui_path}/config.json'\n",
109
+ "\n",
110
+ "if os.path.exists(config_path):\n",
111
+ " with open(config_path, 'r') as file:\n",
112
+ " config_data = json.load(file)\n",
113
+ "\n",
114
+ " for key, value in paths_to_check.items():\n",
115
+ " if key in config_data and config_data[key] != value:\n",
116
+ " sed_command = f\"sed -i 's|\\\"{key}\\\": \\\".*\\\"|\\\"{key}\\\": \\\"{value}\\\"|' {config_path}\"\n",
117
+ " os.system(sed_command)\n",
118
+ "\n",
119
+ " if env == 'Kaggle':\n",
120
+ " !sed -i 's|\"civitai_interface NSFW content\":.*|\"civitai_interface NSFW content\": false,|' {webui_path}/ui-config.json\n",
121
+ "\n",
122
+ "with tunnel:\n",
123
+ " %cd {webui_path}\n",
124
+ "\n",
125
+ " commandline_arguments += f' --port={tunnel_port}'\n",
126
+ " if ngrok_token:\n",
127
+ " commandline_arguments += f' --ngrok {ngrok_token}'\n",
128
+ " if env != \"Google Colab\":\n",
129
+ " commandline_arguments += f' --encrypt-pass={tunnel_port} --api'\n",
130
+ "\n",
131
+ " if change_webui == 'Forge':\n",
132
+ " commandline_arguments += ' --cuda-stream --pin-shared-memory'\n",
133
+ "\n",
134
+ " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
135
+ "\n",
136
+ "start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
137
+ "time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
138
+ "print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
139
+ "\n",
140
+ "if zrok_token:\n",
141
+ " !zrok disable &> /dev/null"
142
+ ]
143
+ }
144
+ ]
145
  }
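For context on the launch cell above: the final WebUI start command is assembled by appending tunnel- and environment-dependent flags to the commandline_arguments value read from settings.json, and is then executed as COMMANDLINE_ARGS="..." python launch.py inside the tunnel context manager. The sketch below reproduces that flag assembly with placeholder values; the base arguments and the chosen environment are hypothetical, not taken from this commit.

# Sketch of the flag assembly performed in the launch cell (placeholder values only).
commandline_arguments = "--xformers --no-half-vae"   # hypothetical value from settings.json
tunnel_port = 1834
ngrok_token = ""                                     # empty string: the --ngrok flag is skipped
env = "Kaggle"                                       # placeholder environment
change_webui = "Forge"

commandline_arguments += f" --port={tunnel_port}"
if ngrok_token:
    commandline_arguments += f" --ngrok {ngrok_token}"
if env != "Google Colab":
    commandline_arguments += f" --encrypt-pass={tunnel_port} --api"
if change_webui == "Forge":
    commandline_arguments += " --cuda-stream --pin-shared-memory"

print(f'COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
# COMMANDLINE_ARGS="--xformers --no-half-vae --port=1834 --encrypt-pass=1834 --api --cuda-stream --pin-shared-memory" python launch.py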
files_cells/python/en/downloading_en.py CHANGED
@@ -1,666 +1,668 @@
1
- ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
-
3
- import os
4
- import re
5
- import time
6
- import json
7
- import shutil
8
- import zipfile
9
- import requests
10
- import subprocess
11
- from datetime import timedelta
12
- from subprocess import getoutput
13
- from IPython.utils import capture
14
- from IPython.display import clear_output
15
- from urllib.parse import urlparse, parse_qs
16
-
17
-
18
- # ================= DETECT ENV =================
19
- def detect_environment():
20
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
- environments = {
22
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
- }
25
- for env_var, (environment, path) in environments.items():
26
- if env_var in os.environ:
27
- return environment, path, free_plan
28
-
29
- env, root_path, free_plan = detect_environment()
30
- webui_path = f"{root_path}/sdw"
31
-
32
-
33
- # ================ LIBRARIES V2 ================
34
- flag_file = f"{root_path}/libraries_installed.txt"
35
-
36
- if not os.path.exists(flag_file):
37
- print("💿 Installing the libraries, it's going to take a while:\n")
38
-
39
- install_lib = {
40
- "aria2": "apt -y install aria2",
41
- "localtunnel": "npm install -g localtunnel",
42
- "insightface": "pip install insightface"
43
- }
44
-
45
- additional_libs = {
46
- "Google Colab": {
47
- "xformers": "pip install xformers==0.0.26.post1 --no-deps"
48
- },
49
- "Kaggle": {
50
- "xformers": "pip install xformers==0.0.26.post1",
51
- # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
52
- "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
53
- }
54
- }
55
-
56
- if env in additional_libs:
57
- install_lib.update(additional_libs[env])
58
-
59
- # Loop through libraries
60
- for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
61
- print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
62
- subprocess.run(install_cmd, shell=True, capture_output=True)
63
-
64
- # Additional specific packages
65
- with capture.capture_output() as cap:
66
- get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
67
- get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
68
- get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
69
- del cap
70
-
71
- clear_output()
72
-
73
- # Save file install lib
74
- with open(flag_file, "w") as f:
75
- f.write(">W<'")
76
-
77
- print("🍪 Libraries are installed!" + " "*35)
78
- time.sleep(2)
79
- clear_output()
80
-
81
-
82
- # ================= loading settings V4 =================
83
- def load_settings(path):
84
- if os.path.exists(path):
85
- with open(path, 'r') as file:
86
- return json.load(file)
87
- return {}
88
-
89
- settings = load_settings(f'{root_path}/settings.json')
90
-
91
- VARIABLES = [
92
- 'model', 'model_num', 'inpainting_model',
93
- 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',
94
- 'change_webui', 'detailed_download', 'controlnet',
95
- 'controlnet_num', 'commit_hash', 'huggingface_token',
96
- 'ngrok_token', 'zrok_token', 'commandline_arguments',
97
- 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',
98
- 'Extensions_url', 'custom_file_urls'
99
- ]
100
-
101
- locals().update({key: settings.get(key) for key in VARIABLES})
102
-
103
-
104
- # ================= OTHER =================
105
- try:
106
- start_colab
107
- except:
108
- start_colab = int(time.time())-5
109
-
110
- # CONFIG DIR
111
- models_dir = f"{webui_path}/models/Stable-diffusion"
112
- vaes_dir = f"{webui_path}/models/VAE"
113
- embeddings_dir = f"{webui_path}/embeddings"
114
- loras_dir = f"{webui_path}/models/Lora"
115
- extensions_dir = f"{webui_path}/extensions"
116
- control_dir = f"{webui_path}/models/ControlNet"
117
- adetailer_dir = f"{webui_path}/models/adetailer"
118
-
119
-
120
- # ================= MAIN CODE =================
121
- if not os.path.exists(webui_path):
122
- start_install = int(time.time())
123
- print("⌚ Unpacking Stable Diffusion..." if change_webui != 'Forge' else "⌚ Unpacking Stable Diffusion (Forge)...", end='')
124
- with capture.capture_output() as cap:
125
- aria2_command = "aria2c --console-log-level=error -c -x 16 -s 16 -k 1M"
126
- url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip" if change_webui != 'Forge' else "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip"
127
- get_ipython().system('{aria2_command} {url} -o repo.zip')
128
-
129
- get_ipython().system('unzip -q -o repo.zip -d {webui_path}')
130
- get_ipython().system('rm -rf repo.zip')
131
-
132
- get_ipython().run_line_magic('cd', '{root_path}')
133
- os.environ["SAFETENSORS_FAST_GPU"]='1'
134
- os.environ["CUDA_MODULE_LOADING"]="LAZY"
135
- os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
136
- os.environ["PYTHONWARNINGS"] = "ignore"
137
-
138
- get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
139
- del cap
140
- install_time = timedelta(seconds=time.time()-start_install)
141
- print("\r🚀 Unpacking is complete! For","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
142
- else:
143
- print("🚀 All unpacked... Skip. ⚡")
144
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
145
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
146
- print(f"⌚️ You have been conducting this session for - \033[33m{time_since_start}\033[0m")
147
-
148
-
149
- ## Changes extensions and WebUi
150
- if latest_webui or latest_exstensions:
151
- action = "Updating WebUI and Extensions" if latest_webui and latest_exstensions else ("WebUI Update" if latest_webui else "Update Extensions")
152
- print(f"⌚️ {action}...", end='', flush=True)
153
- with capture.capture_output() as cap:
154
- get_ipython().system('git config --global user.email "[email protected]"')
155
- get_ipython().system('git config --global user.name "Your Name"')
156
-
157
- ## Update Webui
158
- if latest_webui:
159
- get_ipython().run_line_magic('cd', '{webui_path}')
160
- get_ipython().system('git restore .')
161
- get_ipython().system('git pull -X theirs --rebase --autostash')
162
-
163
- ## Update extensions
164
- if latest_exstensions:
165
- get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
166
- del cap
167
- print(f"\r✨ {action} Completed!")
168
-
169
-
170
- # === FIXING EXTENSIONS ===
171
- anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
172
-
173
- with capture.capture_output() as cap:
174
- # --- Umi-Wildcard ---
175
- get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
176
-
177
- # --- Encrypt-Image ---
178
- get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")
179
-
180
- # --- Additional-Networks ---
181
- get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style')
182
- del cap
183
-
184
-
185
- ## Version switching
186
- if commit_hash:
187
- print('⏳ Time machine activation...', end="", flush=True)
188
- with capture.capture_output() as cap:
189
- get_ipython().run_line_magic('cd', '{webui_path}')
190
- get_ipython().system('git config --global user.email "[email protected]"')
191
- get_ipython().system('git config --global user.name "Your Name"')
192
- get_ipython().system('git reset --hard {commit_hash}')
193
- del cap
194
- print(f"\r⌛️ The time machine has been activated! Current commit: \033[34m{commit_hash}\033[0m")
195
-
196
-
197
- ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
198
- print("📦 Downloading models and stuff...", end='')
199
- model_list = {
200
- "1.Anime (by XpucT) + INP": [
201
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
202
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
203
- ],
204
- "2.BluMix [Anime] [V7] + INP": [
205
- {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
206
- {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
207
- ],
208
- "3.Cetus-Mix [Anime] [V4] + INP": [
209
- {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
210
- {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
211
- ],
212
- "4.Counterfeit [Anime] [V3] + INP": [
213
- {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
214
- {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
215
- ],
216
- "5.CuteColor [Anime] [V3]": [
217
- {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
218
- ],
219
- "6.Dark-Sushi-Mix [Anime]": [
220
- {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
221
- {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
222
- ],
223
- "7.Deliberate [Realism] [V6] + INP": [
224
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
225
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
226
- ],
227
- "8.Meina-Mix [Anime] [V11] + INP": [
228
- {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
229
- {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
230
- ],
231
- "9.Mix-Pro [Anime] [V4] + INP": [
232
- {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
233
- {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
234
- ]
235
- }
236
-
237
- vae_list = {
238
- "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "vae-ft-mse-840000-ema-pruned.vae.safetensors"}],
239
- "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
240
- "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
241
- "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
242
- "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
243
- }
244
-
245
- controlnet_list = {
246
- "1.canny": [
247
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
248
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
249
- ],
250
- "2.openpose": [
251
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
252
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
253
- ],
254
- "3.depth": [
255
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
256
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
257
- {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
258
- ],
259
- "4.normal_map": [
260
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
261
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
262
- ],
263
- "5.mlsd": [
264
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
265
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
266
- ],
267
- "6.lineart": [
268
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
269
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
270
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
271
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
272
- ],
273
- "7.soft_edge": [
274
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
275
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
276
- ],
277
- "8.scribble": [
278
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
279
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
280
- ],
281
- "9.segmentation": [
282
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
283
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
284
- ],
285
- "10.shuffle": [
286
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
287
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
288
- ],
289
- "11.tile": [
290
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
291
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
292
- ],
293
- "12.inpaint": [
294
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
295
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
296
- ],
297
- "13.instruct_p2p": [
298
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
299
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
300
- ]
301
- }
302
-
303
- url = ""
304
- prefixes = {
305
- "model": models_dir,
306
- "vae": vaes_dir,
307
- "lora": loras_dir,
308
- "embed": embeddings_dir,
309
- "extension": extensions_dir,
310
- "control": control_dir,
311
- "adetailer": adetailer_dir,
312
- "config": webui_path
313
- }
314
-
315
- extension_repo = []
316
- directories = [value for key, value in prefixes.items()] # for unpucking zip files
317
- get_ipython().system('mkdir -p {" ".join(directories)}')
318
-
319
- hf_token = huggingface_token if huggingface_token else "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"
320
- user_header = f"\"Authorization: Bearer {hf_token}\""
321
-
322
- ''' Formatted Info Output '''
323
-
324
- from math import floor
325
-
326
- def center_text(text, terminal_width=45):
327
- text_length = len(text)
328
- left_padding = floor((terminal_width - text_length) / 2)
329
- right_padding = terminal_width - text_length - left_padding
330
- return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
331
-
332
- def format_output(url, dst_dir, file_name):
333
- info = f"[{file_name.split('.')[0]}]"
334
- info = center_text(info)
335
-
336
- print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
337
- print(f"\033[33mURL: \033[34m{url}")
338
- print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
339
- print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
340
-
341
- ''' GET CivitAi API - DATA '''
342
-
343
- def strip_(url, file_name=None):
344
- if 'github.com' in url:
345
- if '/blob/' in url:
346
- url = url.replace('/blob/', '/raw/')
347
-
348
- elif "civitai.com" in url:
349
- return CivitAi_API(url, file_name)
350
-
351
- elif "huggingface.co" in url:
352
- if '/blob/' in url:
353
- url = url.replace('/blob/', '/resolve/')
354
- if '?' in url:
355
- url = url.split('?')[0]
356
-
357
- return url
358
-
359
- def CivitAi_API(url, file_name=None):
360
- support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
361
- civitai_token = "62c0c5956b2f9defbd844d754000180b"
362
-
363
- if '?token=' in url:
364
- url = url.split('?token=')[0]
365
- if '?type=' in url:
366
- url = url.replace('?type=', f'?token={civitai_token}&type=')
367
- else:
368
- url = f"{url}?token={civitai_token}"
369
-
370
- # Determine model or version id
371
- if "civitai.com/models/" in url:
372
- if '?modelVersionId=' in url:
373
- version_id = url.split('?modelVersionId=')[1]
374
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
375
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
376
- else:
377
- model_id = url.split('/models/')[1].split('/')[0]
378
- response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
379
- # print(f"end - https://civitai.com/api/v1/models/{model_id}")
380
- else:
381
- version_id = url.split('/models/')[1].split('/')[0]
382
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
383
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
384
-
385
- data = response.json()
386
-
387
- if response.status_code != 200:
388
- return None, None, None, None, None, None, None
389
-
390
- # Define model type and name
391
- if "civitai.com/models/" in url:
392
- if '?modelVersionId=' in url:
393
- model_type = data['model']['type']
394
- model_name = data['files'][0]['name']
395
- else:
396
- model_type = data['type']
397
- model_name = data['modelVersions'][0]['files'][0]['name']
398
- elif 'type=' in url:
399
- model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
400
- if 'model' in model_type.lower():
401
- model_name = data['files'][0]['name']
402
- else:
403
- model_name = data['files'][1]['name']
404
- else:
405
- model_type = data['model']['type']
406
- model_name = data['files'][0]['name']
407
-
408
- model_name = file_name or model_name
409
-
410
- # Determine DownloadUrl
411
- if "civitai.com/models/" in url:
412
- if '?modelVersionId=' in url:
413
- download_url = data.get('downloadUrl')
414
- else:
415
- download_url = data["modelVersions"][0].get("downloadUrl", "")
416
- elif 'type=' in url:
417
- if any(t.lower() in model_type.lower() for t in support_types):
418
- download_url = data['files'][0]['downloadUrl']
419
- else:
420
- download_url = data['files'][1]['downloadUrl']
421
- else:
422
- download_url = data.get('downloadUrl')
423
-
424
- clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
425
-
426
- # Find a safe image: level less than 4 | Kaggle
427
- image_url, image_name = None, None
428
- if any(t in model_type for t in support_types):
429
- try:
430
- images = data.get('images') or data['modelVersions'][0].get('images', [])
431
- if env == 'Kaggle':
432
- image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
433
- else:
434
- image_url = images[0]['url'] if images else None
435
- except KeyError:
436
- pass
437
-
438
- # Generate a name to save the image
439
- image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
440
-
441
- return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
442
-
443
- ''' Main Download Code '''
444
-
445
- def download(url):
446
- links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
447
-
448
- for link_or_path in links_and_paths:
449
- if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
450
- handle_manual(link_or_path)
451
- else:
452
- url, dst_dir, file_name = link_or_path.split()
453
- manual_download(url, dst_dir, file_name)
454
-
455
- unpack_zip_files()
456
-
457
- def unpack_zip_files():
458
- for directory in directories:
459
- for root, _, files in os.walk(directory):
460
- for file in files:
461
- if file.endswith(".zip"):
462
- zip_path = os.path.join(root, file)
463
- extract_path = os.path.splitext(zip_path)[0]
464
- with zipfile.ZipFile(zip_path, 'r') as zip_ref:
465
- zip_ref.extractall(extract_path)
466
- os.remove(zip_path)
467
-
468
- def handle_manual(url):
469
- url_parts = url.split(':', 1)
470
- prefix, path = url_parts[0], url_parts[1]
471
-
472
- file_name_match = re.search(r'\[(.*?)\]', path)
473
- file_name = file_name_match.group(1) if file_name_match else None
474
- if file_name:
475
- path = re.sub(r'\[.*?\]', '', path)
476
-
477
- if prefix in prefixes:
478
- dir = prefixes[prefix]
479
- if prefix != "extension":
480
- try:
481
- manual_download(path, dir, file_name=file_name)
482
- except Exception as e:
483
- print(f"Error downloading file: {e}")
484
- else:
485
- extension_repo.append((path, file_name))
486
-
487
- def manual_download(url, dst_dir, file_name):
488
- aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
489
- basename = url.split("/")[-1] if file_name is None else file_name
490
- header_option = f"--header={user_header}"
491
-
492
- if 'github.com' in url:
493
- url = strip_(url)
494
-
495
- # -- CivitAi APi+ V2 --
496
- elif 'civitai' in url:
497
- url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
498
-
499
- if image_url and image_name:
500
- with capture.capture_output() as cap:
501
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
502
- del cap
503
-
504
- elif "huggingface.co" in url:
505
- clean_url = strip_(url)
506
-
507
- """ Formatted info output """
508
- model_name_or_basename = file_name if not 'huggingface' in url else basename
509
- format_output(clean_url or url, dst_dir, model_name_or_basename)
510
-
511
- # ## -- for my tests --
512
- # print(url, dst_dir, model_name_or_basename)
513
- print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
514
- if 'civitai' in url and data and image_name:
515
- print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
516
- # =====================
517
-
518
- # # -- Git Hub --
519
- if 'github.com' in url or 'githubusercontent.com' in url:
520
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
521
-
522
- # -- GDrive --
523
- elif 'drive.google' in url:
524
- try:
525
- have_drive_link
526
- except:
527
- get_ipython().system('pip install -U gdown > /dev/null')
528
- have_drive_link = True
529
-
530
- if 'folders' in url:
531
- get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
532
- else:
533
- if file_name:
534
- get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
535
- else:
536
- get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
537
-
538
- # -- Hugging Face --
539
- elif 'huggingface' in url:
540
- get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
541
-
542
- # -- Other --
543
- elif 'http' in url:
544
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} '{'-o' + file_name if file_name else ''}' '{url}'")
545
-
546
- ''' SubModels - Added URLs '''
547
-
548
- def add_submodels(selection, num_selection, model_dict, dst_dir):
549
- if selection == "none":
550
- return []
551
- if selection == "ALL":
552
- all_models = []
553
- for models in model_dict.values():
554
- all_models.extend(models)
555
- selected_models = all_models
556
- else:
557
- selected_models = model_dict[selection]
558
- selected_nums = map(int, num_selection.replace(',', '').split())
559
- for num in selected_nums:
560
- if 1 <= num <= len(model_dict):
561
- name = list(model_dict)[num - 1]
562
- selected_models.extend(model_dict[name])
563
-
564
- unique_models = list({model['name']: model for model in selected_models}.values())
565
- for model in unique_models:
566
- model['dst_dir'] = dst_dir
567
-
568
- return unique_models
569
-
570
- def handle_submodels(selection, num_selection, model_dict, dst_dir, url):
571
- submodels = add_submodels(selection, num_selection, model_dict, dst_dir)
572
- for submodel in submodels:
573
- if not inpainting_model and "inpainting" in submodel['name']:
574
- continue
575
- url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
576
- return url
577
-
578
- url = handle_submodels(model, model_num, model_list, models_dir, url)
579
- url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)
580
- url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)
581
-
582
- ''' file.txt - added urls '''
583
-
584
- def process_file_download(file_url, prefixes, unique_urls):
585
- files_urls = ""
586
-
587
- if file_url.startswith("http"):
588
- if "blob" in file_url:
589
- file_url = file_url.replace("blob", "raw")
590
- response = requests.get(file_url)
591
- lines = response.text.split('\n')
592
- else:
593
- with open(file_url, 'r') as file:
594
- lines = file.readlines()
595
-
596
- current_tag = None
597
- for line in lines:
598
- line = line.strip()
599
- if any(f'# {tag}' in line.lower() for tag in prefixes):
600
- current_tag = next((tag for tag in prefixes if tag in line.lower()))
601
-
602
- urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
603
- for url in urls:
604
- filter_url = url.split('[')[0] # same url filter
605
-
606
- if url.startswith("http") and filter_url not in unique_urls:
607
- files_urls += f"{current_tag}:{url}, "
608
- unique_urls.add(filter_url)
609
-
610
- return files_urls
611
-
612
- file_urls = ""
613
- unique_urls = set()
614
-
615
- if custom_file_urls:
616
- for custom_file_url in custom_file_urls.replace(',', '').split():
617
- if not custom_file_url.endswith('.txt'):
618
- custom_file_url += '.txt'
619
- if not custom_file_url.startswith('http'):
620
- if not custom_file_url.startswith(root_path):
621
- custom_file_url = f'{root_path}/{custom_file_url}'
622
-
623
- try:
624
- file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
625
- except FileNotFoundError:
626
- pass
627
-
628
- # url prefixing
629
- urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
630
- prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())
631
- url += ", ".join(prefixed_urls) + ", " + file_urls
632
-
633
- if detailed_download == "on":
634
- print("\n\n\033[33m# ====== Detailed Download ====== #\n\033[0m")
635
- download(url)
636
- print("\n\033[33m# =============================== #\n\033[0m")
637
- else:
638
- with capture.capture_output() as cap:
639
- download(url)
640
- del cap
641
-
642
- print("\r🏁 Download Complete!" + " "*15)
643
-
644
-
645
- # Cleaning shit after downloading...
646
- get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1')
647
-
648
-
649
- ## Install of Custom extensions
650
- if len(extension_repo) > 0:
651
- print("✨ Installing custom extensions...", end='', flush=True)
652
- with capture.capture_output() as cap:
653
- for repo, repo_name in extension_repo:
654
- if not repo_name:
655
- repo_name = repo.split('/')[-1]
656
- get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
657
- del cap
658
- print(f"\r📦 Installed '{len(extension_repo)}', Custom extensions!")
659
-
660
-
661
- ## List Models and stuff V2
662
- if detailed_download == "off":
663
- print("\n\n\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.")
664
-
665
- get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result')
666
-
 
 
 
1
+ ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
+
3
+ import os
4
+ import re
5
+ import time
6
+ import json
7
+ import shutil
8
+ import zipfile
9
+ import requests
10
+ import subprocess
11
+ from datetime import timedelta
12
+ from subprocess import getoutput
13
+ from IPython.utils import capture
14
+ from IPython.display import clear_output
15
+ from urllib.parse import urlparse, parse_qs
16
+
17
+
18
+ # ================= DETECT ENV =================
19
+ def detect_environment():
20
+ free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
+ environments = {
22
+ 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
+ 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
+ }
25
+ for env_var, (environment, path) in environments.items():
26
+ if env_var in os.environ:
27
+ return environment, path, free_plan
28
+
29
+ env, root_path, free_plan = detect_environment()
30
+ webui_path = f"{root_path}/sdw"
31
+
32
+
33
+ # ================ LIBRARIES V2 ================
34
+ flag_file = f"{root_path}/libraries_installed.txt"
35
+
36
+ if not os.path.exists(flag_file):
37
+ print("💿 Installing the libraries, it's going to take a while:\n")
38
+
39
+ install_lib = {
40
+ # "aria2": "apt -y install aria2",
41
+ "aria2": "pip install aria2",
42
+ "localtunnel": "npm install -g localtunnel",
43
+ "insightface": "pip install insightface"
44
+ }
45
+
46
+ additional_libs = {
47
+ "Google Colab": {
48
+ "xformers": "pip install xformers==0.0.27 --no-deps"
49
+ },
50
+ "Kaggle": {
51
+ "xformers": "pip install xformers==0.0.26.post1",
52
+ # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
53
+ # "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
54
+ }
55
+ }
56
+
57
+ if env in additional_libs:
58
+ install_lib.update(additional_libs[env])
59
+
60
+ # Loop through libraries
61
+ for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
62
+ print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
63
+ subprocess.run(install_cmd, shell=True, capture_output=True)
64
+
65
+ # Additional specific packages
66
+ with capture.capture_output() as cap:
67
+ get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
68
+ get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
69
+ get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
70
+ del cap
71
+
72
+ clear_output()
73
+
74
+ # Save file install lib
75
+ with open(flag_file, "w") as f:
76
+ f.write(">W<'")
77
+
78
+ print("🍪 Libraries are installed!" + " "*35)
79
+ time.sleep(2)
80
+ clear_output()
81
+
82
+
83
+ # ================= loading settings V4 =================
84
+ def load_settings(path):
85
+ if os.path.exists(path):
86
+ with open(path, 'r') as file:
87
+ return json.load(file)
88
+ return {}
89
+
90
+ settings = load_settings(f'{root_path}/settings.json')
91
+
92
+ VARIABLES = [
93
+ 'model', 'model_num', 'inpainting_model',
94
+ 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',
95
+ 'change_webui', 'detailed_download', 'controlnet',
96
+ 'controlnet_num', 'commit_hash', 'huggingface_token',
97
+ 'ngrok_token', 'zrok_token', 'commandline_arguments',
98
+ 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',
99
+ 'Extensions_url', 'custom_file_urls'
100
+ ]
101
+
102
+ locals().update({key: settings.get(key) for key in VARIABLES})
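+ # At cell top level locals() is the global namespace, so each settings key becomes a plain variable (model, vae, controlnet_num, ...); keys missing from settings.json become None.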
103
+
104
+
105
+ # ================= OTHER =================
106
+ try:
107
+ start_colab
108
+ except:
109
+ start_colab = int(time.time())-5
110
+
111
+ # CONFIG DIR
112
+ models_dir = f"{webui_path}/models/Stable-diffusion"
113
+ vaes_dir = f"{webui_path}/models/VAE"
114
+ embeddings_dir = f"{webui_path}/embeddings"
115
+ loras_dir = f"{webui_path}/models/Lora"
116
+ extensions_dir = f"{webui_path}/extensions"
117
+ control_dir = f"{webui_path}/models/ControlNet"
118
+ adetailer_dir = f"{webui_path}/models/adetailer"
119
+
120
+
121
+ # ================= MAIN CODE =================
122
+ if not os.path.exists(webui_path):
123
+ start_install = int(time.time())
124
+ print("⌚ Unpacking Stable Diffusion..." if change_webui != 'Forge' else "⌚ Unpacking Stable Diffusion (Forge)...", end='')
125
+ with capture.capture_output() as cap:
126
+ aria2_command = "aria2c --console-log-level=error -c -x 16 -s 16 -k 1M"
127
+ url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip" if change_webui != 'Forge' else "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip"
128
+ get_ipython().system('{aria2_command} {url} -o repo.zip')
129
+
130
+ get_ipython().system('unzip -q -o repo.zip -d {webui_path}')
131
+ get_ipython().system('rm -rf repo.zip')
132
+
133
+ get_ipython().run_line_magic('cd', '{root_path}')
134
+ os.environ["SAFETENSORS_FAST_GPU"]='1'
135
+ os.environ["CUDA_MODULE_LOADING"]="LAZY"
136
+ os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
137
+ os.environ["PYTHONWARNINGS"] = "ignore"
138
+
139
+ get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
140
+ del cap
141
+ install_time = timedelta(seconds=time.time()-start_install)
142
+ print("\r🚀 Unpacking is complete! For","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
143
+ else:
144
+ print("🚀 All unpacked... Skip. ⚡")
145
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
146
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
147
+ print(f"⌚️ You have been conducting this session for - \033[33m{time_since_start}\033[0m")
148
+
149
+
150
+ ## Update WebUI and extensions
151
+ if latest_webui or latest_exstensions:
152
+ action = "Updating WebUI and Extensions" if latest_webui and latest_exstensions else ("WebUI Update" if latest_webui else "Update Extensions")
153
+ print(f"⌚️ {action}...", end='', flush=True)
154
+ with capture.capture_output() as cap:
155
+ get_ipython().system('git config --global user.email "[email protected]"')
156
+ get_ipython().system('git config --global user.name "Your Name"')
157
+
158
+ ## Update Webui
159
+ if latest_webui:
160
+ get_ipython().run_line_magic('cd', '{webui_path}')
161
+ get_ipython().system('git restore .')
162
+ get_ipython().system('git pull -X theirs --rebase --autostash')
163
+
164
+ ## Update extensions
165
+ if latest_exstensions:
166
+ get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
167
+ del cap
168
+ print(f"\r✨ {action} Completed!")
169
+
170
+
171
+ # === FIXING EXTENSIONS ===
172
+ anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
173
+
174
+ with capture.capture_output() as cap:
175
+ # --- Umi-Wildcard ---
176
+ get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
177
+
178
+ # --- Encrypt-Image ---
179
+ get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")
180
+
181
+ # --- Additional-Networks ---
182
+ get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style')
183
+ del cap
184
+
185
+
186
+ ## Version switching
187
+ if commit_hash:
188
+ print('⏳ Time machine activation...', end="", flush=True)
189
+ with capture.capture_output() as cap:
190
+ get_ipython().run_line_magic('cd', '{webui_path}')
191
+ get_ipython().system('git config --global user.email "[email protected]"')
192
+ get_ipython().system('git config --global user.name "Your Name"')
193
+ get_ipython().system('git reset --hard {commit_hash}')
194
+ del cap
195
+ print(f"\r⌛️ The time machine has been activated! Current commit: \033[34m{commit_hash}\033[0m")
196
+
197
+
198
+ ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
199
+ print("📦 Downloading models and stuff...", end='')
200
+ model_list = {
201
+ "1.Anime (by XpucT) + INP": [
202
+ {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
203
+ {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
204
+ ],
205
+ "2.BluMix [Anime] [V7] + INP": [
206
+ {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
207
+ {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
208
+ ],
209
+ "3.Cetus-Mix [Anime] [V4] + INP": [
210
+ {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
211
+ {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
212
+ ],
213
+ "4.Counterfeit [Anime] [V3] + INP": [
214
+ {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
215
+ {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
216
+ ],
217
+ "5.CuteColor [Anime] [V3]": [
218
+ {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
219
+ ],
220
+ "6.Dark-Sushi-Mix [Anime]": [
221
+ {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
222
+ {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
223
+ ],
224
+ "7.Deliberate [Realism] [V6] + INP": [
225
+ {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
226
+ {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
227
+ ],
228
+ "8.Meina-Mix [Anime] [V11] + INP": [
229
+ {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
230
+ {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
231
+ ],
232
+ "9.Mix-Pro [Anime] [V4] + INP": [
233
+ {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
234
+ {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
235
+ ]
236
+ }
237
+
238
+ vae_list = {
239
+ "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "Anime.vae.safetensors"}],
240
+ "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
241
+ "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
242
+ "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
243
+ "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
244
+ }
245
+
246
+ controlnet_list = {
247
+ "1.canny": [
248
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
249
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
250
+ ],
251
+ "2.openpose": [
252
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
253
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
254
+ ],
255
+ "3.depth": [
256
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
257
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
258
+ {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
259
+ ],
260
+ "4.normal_map": [
261
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
262
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
263
+ ],
264
+ "5.mlsd": [
265
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
266
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
267
+ ],
268
+ "6.lineart": [
269
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
270
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
271
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
272
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
273
+ ],
274
+ "7.soft_edge": [
275
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
276
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
277
+ ],
278
+ "8.scribble": [
279
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
280
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
281
+ ],
282
+ "9.segmentation": [
283
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
284
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
285
+ ],
286
+ "10.shuffle": [
287
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
288
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
289
+ ],
290
+ "11.tile": [
291
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
292
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
293
+ ],
294
+ "12.inpaint": [
295
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
296
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
297
+ ],
298
+ "13.instruct_p2p": [
299
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
300
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
301
+ ]
302
+ }
303
+
304
+ url = ""
305
+ prefixes = {
306
+ "model": models_dir,
307
+ "vae": vaes_dir,
308
+ "lora": loras_dir,
309
+ "embed": embeddings_dir,
310
+ "extension": extensions_dir,
311
+ "control": control_dir,
312
+ "adetailer": adetailer_dir,
313
+ "config": webui_path
314
+ }
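+ # These prefix tags route manual "prefix:URL[filename]" entries to their target directories and also mark "# tag" sections in custom file.txt lists.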
315
+
316
+ extension_repo = []
317
+ directories = list(prefixes.values())    # also used when unpacking zip files
318
+ get_ipython().system('mkdir -p {" ".join(directories)}')
319
+
320
+ hf_token = huggingface_token if huggingface_token else "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"
321
+ user_header = f"\"Authorization: Bearer {hf_token}\""
322
+
323
+ ''' Formatted Info Output '''
324
+
325
+ from math import floor
326
+
327
+ def center_text(text, terminal_width=45):
328
+ text_length = len(text)
329
+ left_padding = floor((terminal_width - text_length) / 2)
330
+ right_padding = terminal_width - text_length - left_padding
331
+ return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
332
+
333
+ def format_output(url, dst_dir, file_name):
334
+ info = f"[{file_name.split('.')[0]}]"
335
+ info = center_text(info)
336
+
337
+ print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
338
+ print(f"\033[33mURL: \033[34m{url}")
339
+ print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
340
+ print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
341
+
342
+ ''' GET CivitAi API - DATA '''
343
+
344
+ def strip_(url, file_name=None):
345
+ if 'github.com' in url:
346
+ if '/blob/' in url:
347
+ url = url.replace('/blob/', '/raw/')
348
+
349
+ elif "civitai.com" in url:
350
+ return CivitAi_API(url, file_name)
351
+
352
+ elif "huggingface.co" in url:
353
+ if '/blob/' in url:
354
+ url = url.replace('/blob/', '/resolve/')
355
+ if '?' in url:
356
+ url = url.split('?')[0]
357
+
358
+ return url
359
+
360
+ def CivitAi_API(url, file_name=None):
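+ # Resolves a CivitAi model/version link via the public API and returns (download_url_with_token, clean_url, model_type, model_name, image_url, image_name, api_data); all seven are None if the request fails.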
361
+ support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
362
+ civitai_token = "62c0c5956b2f9defbd844d754000180b"
363
+
364
+ if '?token=' in url:
365
+ url = url.split('?token=')[0]
366
+ if '?type=' in url:
367
+ url = url.replace('?type=', f'?token={civitai_token}&type=')
368
+ else:
369
+ url = f"{url}?token={civitai_token}"
370
+
371
+ # Determine model or version id
372
+ if "civitai.com/models/" in url:
373
+ if '?modelVersionId=' in url:
374
+ version_id = url.split('?modelVersionId=')[1]
375
+ response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
376
+ # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
377
+ else:
378
+ model_id = url.split('/models/')[1].split('/')[0]
379
+ response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
380
+ # print(f"end - https://civitai.com/api/v1/models/{model_id}")
381
+ else:
382
+ version_id = url.split('/models/')[1].split('/')[0]
383
+ response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
384
+ # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
385
+
386
+ data = response.json()
387
+
388
+ if response.status_code != 200:
389
+ return None, None, None, None, None, None, None
390
+
391
+ # Define model type and name
392
+ if "civitai.com/models/" in url:
393
+ if '?modelVersionId=' in url:
394
+ model_type = data['model']['type']
395
+ model_name = data['files'][0]['name']
396
+ else:
397
+ model_type = data['type']
398
+ model_name = data['modelVersions'][0]['files'][0]['name']
399
+ elif 'type=' in url:
400
+ model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
401
+ if 'model' in model_type.lower():
402
+ model_name = data['files'][0]['name']
403
+ else:
404
+ model_name = data['files'][1]['name']
405
+ else:
406
+ model_type = data['model']['type']
407
+ model_name = data['files'][0]['name']
408
+
409
+ model_name = file_name or model_name
410
+
411
+ # Determine DownloadUrl
412
+ if "civitai.com/models/" in url:
413
+ if '?modelVersionId=' in url:
414
+ download_url = data.get('downloadUrl')
415
+ else:
416
+ download_url = data["modelVersions"][0].get("downloadUrl", "")
417
+ elif 'type=' in url:
418
+ if any(t.lower() in model_type.lower() for t in support_types):
419
+ download_url = data['files'][0]['downloadUrl']
420
+ else:
421
+ download_url = data['files'][1]['downloadUrl']
422
+ else:
423
+ download_url = data.get('downloadUrl')
424
+
425
+ clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
426
+
427
+ # Find a safe image: level less than 4 | Kaggle
428
+ image_url, image_name = None, None
429
+ if any(t in model_type for t in support_types):
430
+ try:
431
+ images = data.get('images') or data['modelVersions'][0].get('images', [])
432
+ if env == 'Kaggle':
433
+ image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
434
+ else:
435
+ image_url = images[0]['url'] if images else None
436
+ except KeyError:
437
+ pass
438
+
439
+ # Generate a name to save the image
440
+ image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
441
+
442
+ return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
443
+
444
+ ''' Main Download Code '''
445
+
446
+ def download(url):
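+ # Accepts a comma-separated string whose items are either "prefix:URL[filename]" entries or plain "url dst_dir file_name" triples; downloaded zip archives are unpacked afterwards.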
447
+ links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
448
+
449
+ for link_or_path in links_and_paths:
450
+ if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
451
+ handle_manual(link_or_path)
452
+ else:
453
+ url, dst_dir, file_name = link_or_path.split()
454
+ manual_download(url, dst_dir, file_name)
455
+
456
+ unpack_zip_files()
457
+
458
+ def unpack_zip_files():
459
+ for directory in directories:
460
+ for root, _, files in os.walk(directory):
461
+ for file in files:
462
+ if file.endswith(".zip"):
463
+ zip_path = os.path.join(root, file)
464
+ extract_path = os.path.splitext(zip_path)[0]
465
+ with zipfile.ZipFile(zip_path, 'r') as zip_ref:
466
+ zip_ref.extractall(extract_path)
467
+ os.remove(zip_path)
468
+
469
+ def handle_manual(url):
470
+ url_parts = url.split(':', 1)
471
+ prefix, path = url_parts[0], url_parts[1]
472
+
473
+ file_name_match = re.search(r'\[(.*?)\]', path)
474
+ file_name = file_name_match.group(1) if file_name_match else None
475
+ if file_name:
476
+ path = re.sub(r'\[.*?\]', '', path)
477
+
478
+ if prefix in prefixes:
479
+ dir = prefixes[prefix]
480
+ if prefix != "extension":
481
+ try:
482
+ manual_download(path, dir, file_name=file_name)
483
+ except Exception as e:
484
+ print(f"Error downloading file: {e}")
485
+ else:
486
+ extension_repo.append((path, file_name))
487
+
488
+ def manual_download(url, dst_dir, file_name):
489
+ header_option = f"--header={user_header}"
490
+ aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
491
+ aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
492
+
493
+ if 'github.com' in url:
494
+ url = strip_(url)
495
+
496
+ # -- CivitAi APi+ V2 --
497
+ elif 'civitai' in url:
498
+ url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
499
+
500
+ if image_url and image_name:
501
+ with capture.capture_output() as cap:
502
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
503
+ del cap
504
+
505
+ elif "huggingface.co" in url:
506
+ clean_url = strip_(url)
507
+ basename = clean_url.split("/")[-1] if file_name is None else file_name
508
+
509
+ """ Formatted info output """
510
+ model_name_or_basename = file_name if not 'huggingface' in url else basename
511
+ format_output(clean_url or url, dst_dir, model_name_or_basename)
512
+
513
+ # ## -- for my tests --
514
+ # print(url, dst_dir, model_name_or_basename)
515
+ print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
516
+ if 'civitai' in url and data and image_name:
517
+ print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
518
+ # =====================
519
+
520
+ # -- Git Hub --
521
+ if 'github.com' in url or 'githubusercontent.com' in url:
522
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
523
+
524
+ # -- GDrive --
525
+ elif 'drive.google' in url:
526
+ try:
527
+ have_drive_link
528
+ except:
529
+ get_ipython().system('pip install -q gdown==5.2.0 > /dev/null')
530
+ have_drive_link = True
531
+
532
+ if 'folders' in url:
533
+ get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
534
+ else:
535
+ if file_name:
536
+ get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
537
+ else:
538
+ get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
539
+
540
+ # -- Hugging Face --
541
+ elif 'huggingface' in url:
542
+ get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
543
+
544
+ # -- Other --
545
+ elif 'http' in url:
546
+ get_ipython().system('aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o "{file_name if file_name else \'\'}" \'{url}\'')
547
+
548
+ ''' SubModels - Added URLs '''
549
+
550
+ def add_submodels(selection, num_selection, model_dict, dst_dir):
551
+ if selection == "none":
552
+ return []
553
+ if selection == "ALL":
554
+ all_models = []
555
+ for models in model_dict.values():
556
+ all_models.extend(models)
557
+ selected_models = all_models
558
+ else:
559
+ selected_models = model_dict[selection]
560
+ selected_nums = map(int, num_selection.replace(',', ' ').split())
561
+ for num in selected_nums:
562
+ if 1 <= num <= len(model_dict):
563
+ name = list(model_dict)[num - 1]
564
+ selected_models.extend(model_dict[name])
565
+
566
+ unique_models = list({model['name']: model for model in selected_models}.values())
567
+ for model in unique_models:
568
+ model['dst_dir'] = dst_dir
569
+
570
+ return unique_models
571
+
572
+ def handle_submodels(selection, num_selection, model_dict, dst_dir, url):
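+ # Appends "url dst_dir name" triples for the chosen submodels to the accumulated download string, skipping inpainting checkpoints when inpainting_model is off.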
573
+ submodels = add_submodels(selection, num_selection, model_dict, dst_dir)
574
+ for submodel in submodels:
575
+ if not inpainting_model and "inpainting" in submodel['name']:
576
+ continue
577
+ url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
578
+ return url
579
+
580
+ url = handle_submodels(model, model_num, model_list, models_dir, url)
581
+ url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)
582
+ url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)
583
+
584
+ ''' file.txt - added urls '''
585
+
586
+ def process_file_download(file_url, prefixes, unique_urls):
587
+ files_urls = ""
588
+
589
+ if file_url.startswith("http"):
590
+ if "blob" in file_url:
591
+ file_url = file_url.replace("blob", "raw")
592
+ response = requests.get(file_url)
593
+ lines = response.text.split('\n')
594
+ else:
595
+ with open(file_url, 'r') as file:
596
+ lines = file.readlines()
597
+
598
+ current_tag = None
599
+ for line in lines:
600
+ line = line.strip()
601
+ if any(f'# {tag}' in line.lower() for tag in prefixes):
602
+ current_tag = next((tag for tag in prefixes if tag in line.lower()))
603
+
604
+ urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
605
+ for url in urls:
606
+ filter_url = url.split('[')[0] # same url filter
607
+
608
+ if url.startswith("http") and filter_url not in unique_urls:
609
+ files_urls += f"{current_tag}:{url}, "
610
+ unique_urls.add(filter_url)
611
+
612
+ return files_urls
613
+
614
+ file_urls = ""
615
+ unique_urls = set()
616
+
617
+ if custom_file_urls:
618
+ for custom_file_url in custom_file_urls.replace(',', ' ').split():
619
+ if not custom_file_url.endswith('.txt'):
620
+ custom_file_url += '.txt'
621
+ if not custom_file_url.startswith('http'):
622
+ if not custom_file_url.startswith(root_path):
623
+ custom_file_url = f'{root_path}/{custom_file_url}'
624
+
625
+ try:
626
+ file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
627
+ except FileNotFoundError:
628
+ pass
629
+
630
+ # url prefixing
631
+ urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
632
+ prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())
633
+ url += ", ".join(prefixed_urls) + ", " + file_urls
634
+
635
+ if detailed_download == "on":
636
+ print("\n\n\033[33m# ====== Detailed Download ====== #\n\033[0m")
637
+ download(url)
638
+ print("\n\033[33m# =============================== #\n\033[0m")
639
+ else:
640
+ with capture.capture_output() as cap:
641
+ download(url)
642
+ del cap
643
+
644
+ print("\r🏁 Download Complete!" + " "*15)
645
+
646
+
647
+ # Cleaning shit after downloading...
648
+ get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1')
649
+
650
+
651
+ ## Install of Custom extensions
652
+ if len(extension_repo) > 0:
653
+ print("✨ Installing custom extensions...", end='', flush=True)
654
+ with capture.capture_output() as cap:
655
+ for repo, repo_name in extension_repo:
656
+ if not repo_name:
657
+ repo_name = repo.split('/')[-1]
658
+ get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
659
+ del cap
660
+ print(f"\r📦 Installed '{len(extension_repo)}', Custom extensions!")
661
+
662
+
663
+ ## List Models and stuff V2
664
+ if detailed_download == "off":
665
+ print("\n\n\033[33mIf you don't see any downloaded files, enable the 'Detailed Downloads' feature in the widget.")
666
+
667
+ get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result')
668
+
files_cells/python/en/launch_en.py CHANGED
@@ -1,118 +1,118 @@
1
- ##~ LAUNCH CODE | BY: ANXETY ~##
2
-
3
- import os
4
- import re
5
- import time
6
- import json
7
- import requests
8
- import cloudpickle as pickle
9
- from datetime import timedelta
10
- from IPython.display import clear_output
11
-
12
- # ================= DETECT ENV =================
13
- def detect_environment():
14
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
- environments = {
16
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
- }
19
-
20
- for env_var, (environment, path) in environments.items():
21
- if env_var in os.environ:
22
- return environment, path, free_plan
23
- return 'Unknown', '/unknown/path', free_plan
24
-
25
- env, root_path, free_plan = detect_environment()
26
- webui_path = f"{root_path}/sdw"
27
-
28
- def load_settings():
29
- SETTINGS_FILE = f'{root_path}/settings.json'
30
- if os.path.exists(SETTINGS_FILE):
31
- with open(SETTINGS_FILE, 'r') as f:
32
- return json.load(f)
33
- return {}
34
-
35
- settings = load_settings()
36
- ngrok_token = settings.get('ngrok_token', "")
37
- zrok_token = settings.get('zrok_token', "")
38
- commandline_arguments = settings.get('commandline_arguments', "")
39
- change_webui = settings.get('change_webui', "")
40
-
41
- # ======================== TUNNEL V2 ========================
42
- print('Please Wait...')
43
-
44
- def get_public_ip(version='ipv4'):
45
- try:
46
- url = f'https://api64.ipify.org?format=json&{version}=true'
47
- response = requests.get(url)
48
- return response.json().get('ip', 'N/A')
49
- except Exception as e:
50
- print(f"Error getting public {version} address:", e)
51
-
52
- # Check if public IP is already saved, if not then get it
53
- public_ip_file = f"{root_path}/public_ip.txt"
54
- if os.path.exists(public_ip_file):
55
- with open(public_ip_file, 'r') as file:
56
- public_ipv4 = file.read().strip()
57
- else:
58
- public_ipv4 = get_public_ip(version='ipv4')
59
- with open(public_ip_file, 'w') as file:
60
- file.write(public_ipv4)
61
-
62
- tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
63
- tunnel_port = 1834
64
- tunnel = tunnel_class(tunnel_port)
65
- tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
66
- tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
67
-
68
- if zrok_token:
69
- get_ipython().system('zrok enable {zrok_token} &> /dev/null')
70
- tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
71
-
72
- clear_output()
73
-
74
- # =============== Automatic Fixing Path V3 ===============
75
- paths_to_check = {
76
- "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
77
- "additional_networks_extra_lora_path": f"{webui_path}/models/Lora/",
78
- "ad_extra_models_dir": f"{webui_path}/models/adetailer/",
79
- "sd_checkpoint_hash": "",
80
- "sd_model_checkpoint": "",
81
- "sd_vae": "None"
82
- }
83
-
84
- config_path = f'{webui_path}/ui-config.json'
85
-
86
- if os.path.exists(config_path):
87
- with open(config_path, 'r') as file:
88
- config_data = json.load(file)
89
-
90
- for key, value in paths_to_check.items():
91
- if key in config_data and config_data[key] != value:
92
- sed_command = f"sed -i 's|\"{key}\": \".*\"|\"{key}\": \"{value}\"|' {config_path}"
93
- os.system(sed_command)
94
-
95
- if env == 'Kaggle':
96
- get_ipython().system('sed -i \'s/"civitai_interface\\/NSFW content\\/value":.*/"civitai_interface\\/NSFW content\\/value": false/g\' {webui_path}/ui-config.json')
97
-
98
- with tunnel:
99
- get_ipython().run_line_magic('cd', '{webui_path}')
100
-
101
- commandline_arguments += f' --port={tunnel_port}'
102
- if ngrok_token:
103
- commandline_arguments += f' --ngrok {ngrok_token}'
104
- if env != "Google Colab":
105
- commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
106
-
107
- if change_webui == 'Forge':
108
- commandline_arguments += ' --cuda-stream --pin-shared-memory'
109
-
110
- get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
111
-
112
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
113
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
114
- print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
115
-
116
- if zrok_token:
117
- get_ipython().system('zrok disable &> /dev/null')
118
-
 
1
+ ##~ LAUNCH CODE | BY: ANXETY ~##
2
+
3
+ import os
4
+ import re
5
+ import time
6
+ import json
7
+ import requests
8
+ import cloudpickle as pickle
9
+ from datetime import timedelta
10
+ from IPython.display import clear_output
11
+
12
+ # ================= DETECT ENV =================
13
+ def detect_environment():
14
+ free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
+ environments = {
16
+ 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
+ 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
+ }
19
+
20
+ for env_var, (environment, path) in environments.items():
21
+ if env_var in os.environ:
22
+ return environment, path, free_plan
23
+ return 'Unknown', '/unknown/path', free_plan
24
+
25
+ env, root_path, free_plan = detect_environment()
26
+ webui_path = f"{root_path}/sdw"
27
+
28
+ def load_settings():
29
+ SETTINGS_FILE = f'{root_path}/settings.json'
30
+ if os.path.exists(SETTINGS_FILE):
31
+ with open(SETTINGS_FILE, 'r') as f:
32
+ return json.load(f)
33
+ return {}
34
+
35
+ settings = load_settings()
36
+ ngrok_token = settings.get('ngrok_token', "")
37
+ zrok_token = settings.get('zrok_token', "")
38
+ commandline_arguments = settings.get('commandline_arguments', "")
39
+ change_webui = settings.get('change_webui', "")
40
+
41
+ # ======================== TUNNEL V2 ========================
42
+ print('Please Wait...')
43
+
44
+ def get_public_ip(version='ipv4'):
45
+ try:
46
+ url = f'https://api64.ipify.org?format=json&{version}=true'
47
+ response = requests.get(url)
48
+ return response.json().get('ip', 'N/A')
49
+ except Exception as e:
50
+ print(f"Error getting public {version} address:", e)
51
+
52
+ # Check if public IP is already saved, if not then get it
53
+ public_ip_file = f"{root_path}/public_ip.txt"
54
+ if os.path.exists(public_ip_file):
55
+ with open(public_ip_file, 'r') as file:
56
+ public_ipv4 = file.read().strip()
57
+ else:
58
+ public_ipv4 = get_public_ip(version='ipv4')
59
+ with open(public_ip_file, 'w') as file:
60
+ file.write(public_ipv4)
61
+
62
+ tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
63
+ tunnel_port = 1834
64
+ tunnel = tunnel_class(tunnel_port)
65
+ tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
66
+ tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
67
+
68
+ if zrok_token:
69
+ get_ipython().system('zrok enable {zrok_token} &> /dev/null')
70
+ tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
71
+
72
+ clear_output()
73
+
74
+ # =============== Automatic Fixing Path V3 ===============
75
+ paths_to_check = {
76
+ "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
77
+ "additional_networks_extra_lora_path": f"{webui_path}/models/Lora/",
78
+ "ad_extra_models_dir": f"{webui_path}/models/adetailer/",
79
+ "sd_checkpoint_hash": "",
80
+ "sd_model_checkpoint": "",
81
+ "sd_vae": "None"
82
+ }
83
+
84
+ config_path = f'{webui_path}/config.json'
85
+
86
+ if os.path.exists(config_path):
87
+ with open(config_path, 'r') as file:
88
+ config_data = json.load(file)
89
+
90
+ for key, value in paths_to_check.items():
91
+ if key in config_data and config_data[key] != value:
92
+ sed_command = f"sed -i 's|\"{key}\": \".*\"|\"{key}\": \"{value}\"|' {config_path}"
93
+ os.system(sed_command)
94
+
95
+ if env == 'Kaggle':
96
+ get_ipython().system('sed -i \'s|"civitai_interface NSFW content":.*|"civitai_interface NSFW content": false,|\' {webui_path}/ui-config.json')
97
+
98
+ with tunnel:
99
+ get_ipython().run_line_magic('cd', '{webui_path}')
100
+
101
+ commandline_arguments += f' --port={tunnel_port}'
102
+ if ngrok_token:
103
+ commandline_arguments += f' --ngrok {ngrok_token}'
104
+ if env != "Google Colab":
105
+ commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
106
+
107
+ if change_webui == 'Forge':
108
+ commandline_arguments += ' --cuda-stream --pin-shared-memory'
109
+
110
+ get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
111
+
112
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
113
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
114
+ print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
115
+
116
+ if zrok_token:
117
+ get_ipython().system('zrok disable &> /dev/null')
118
+
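For reference, the sed-based patching of paths_to_check in launch_en.py above rewrites values inside a flat JSON config. A rough pure-Python equivalent is sketched below; it is not the method used by the commit, and the config_path and keys shown are illustrative assumptions that mirror webui_path above.

import json

config_path = '/content/sdw/config.json'  # hypothetical path for illustration
paths_to_check = {"sd_vae": "None", "sd_model_checkpoint": ""}  # subset of the keys patched above

with open(config_path, 'r') as f:
    config_data = json.load(f)

for key, value in paths_to_check.items():
    # overwrite only keys that exist and differ, as the sed loop does
    if key in config_data and config_data[key] != value:
        config_data[key] = value

with open(config_path, 'w') as f:
    json.dump(config_data, f, indent=4)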
files_cells/python/ru/downloading_ru.py CHANGED
@@ -1,666 +1,668 @@
1
- ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
-
3
- import os
4
- import re
5
- import time
6
- import json
7
- import shutil
8
- import zipfile
9
- import requests
10
- import subprocess
11
- from datetime import timedelta
12
- from subprocess import getoutput
13
- from IPython.utils import capture
14
- from IPython.display import clear_output
15
- from urllib.parse import urlparse, parse_qs
16
-
17
-
18
- # ================= DETECT ENV =================
19
- def detect_environment():
20
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
- environments = {
22
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
- }
25
- for env_var, (environment, path) in environments.items():
26
- if env_var in os.environ:
27
- return environment, path, free_plan
28
-
29
- env, root_path, free_plan = detect_environment()
30
- webui_path = f"{root_path}/sdw"
31
-
32
-
33
- # ================ LIBRARIES V2 ================
34
- flag_file = f"{root_path}/libraries_installed.txt"
35
-
36
- if not os.path.exists(flag_file):
37
- print("💿 Установка библиотек, это займет какое-то время:\n")
38
-
39
- install_lib = {
40
- "aria2": "apt -y install aria2",
41
- "localtunnel": "npm install -g localtunnel",
42
- "insightface": "pip install insightface"
43
- }
44
-
45
- additional_libs = {
46
- "Google Colab": {
47
- "xformers": "pip install xformers==0.0.26.post1 --no-deps"
48
- },
49
- "Kaggle": {
50
- "xformers": "pip install xformers==0.0.26.post1",
51
- # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
52
- "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
53
- }
54
- }
55
-
56
- if env in additional_libs:
57
- install_lib.update(additional_libs[env])
58
-
59
- # Loop through libraries
60
- for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
61
- print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
62
- subprocess.run(install_cmd, shell=True, capture_output=True)
63
-
64
- # Additional specific packages
65
- with capture.capture_output() as cap:
66
- get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
67
- get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
68
- get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
69
- del cap
70
-
71
- clear_output()
72
-
73
- # Save file install lib
74
- with open(flag_file, "w") as f:
75
- f.write(">W<'")
76
-
77
- print("🍪 Библиотеки установлены!" + " "*35)
78
- time.sleep(2)
79
- clear_output()
80
-
81
-
82
- # ================= loading settings V4 =================
83
- def load_settings(path):
84
- if os.path.exists(path):
85
- with open(path, 'r') as file:
86
- return json.load(file)
87
- return {}
88
-
89
- settings = load_settings(f'{root_path}/settings.json')
90
-
91
- VARIABLES = [
92
- 'model', 'model_num', 'inpainting_model',
93
- 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',
94
- 'change_webui', 'detailed_download', 'controlnet',
95
- 'controlnet_num', 'commit_hash', 'huggingface_token',
96
- 'ngrok_token', 'zrok_token', 'commandline_arguments',
97
- 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',
98
- 'Extensions_url', 'custom_file_urls'
99
- ]
100
-
101
- locals().update({key: settings.get(key) for key in VARIABLES})
102
-
103
-
104
- # ================= OTHER =================
105
- try:
106
- start_colab
107
- except:
108
- start_colab = int(time.time())-5
109
-
110
- # CONFIG DIR
111
- models_dir = f"{webui_path}/models/Stable-diffusion"
112
- vaes_dir = f"{webui_path}/models/VAE"
113
- embeddings_dir = f"{webui_path}/embeddings"
114
- loras_dir = f"{webui_path}/models/Lora"
115
- extensions_dir = f"{webui_path}/extensions"
116
- control_dir = f"{webui_path}/models/ControlNet"
117
- adetailer_dir = f"{webui_path}/models/adetailer"
118
-
119
-
120
- # ================= MAIN CODE =================
121
- if not os.path.exists(webui_path):
122
- start_install = int(time.time())
123
- print("⌚ Распаковка Stable Diffusion..." if change_webui != 'Forge' else "⌚ Распаковка Stable Diffusion (Forge)...", end='')
124
- with capture.capture_output() as cap:
125
- aria2_command = "aria2c --console-log-level=error -c -x 16 -s 16 -k 1M"
126
- url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip" if change_webui != 'Forge' else "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip"
127
- get_ipython().system('{aria2_command} {url} -o repo.zip')
128
-
129
- get_ipython().system('unzip -q -o repo.zip -d {webui_path}')
130
- get_ipython().system('rm -rf repo.zip')
131
-
132
- get_ipython().run_line_magic('cd', '{root_path}')
133
- os.environ["SAFETENSORS_FAST_GPU"]='1'
134
- os.environ["CUDA_MODULE_LOADING"]="LAZY"
135
- os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
136
- os.environ["PYTHONWARNINGS"] = "ignore"
137
-
138
- get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
139
- del cap
140
- install_time = timedelta(seconds=time.time()-start_install)
141
- print("\r🚀 Распаковка Завершена! За","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
142
- else:
143
- print("🚀 Все распакованно... Пропуск. ⚡")
144
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
145
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
146
- print(f"⌚️ Вы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m")
147
-
148
-
149
- ## Changes extensions and WebUi
150
- if latest_webui or latest_exstensions:
151
- action = "Обновление WebUI и Расширений" if latest_webui and latest_exstensions else ("Обновление WebUI" if latest_webui else "Обновление Расширений")
152
- print(f"⌚️ {action}...", end='', flush=True)
153
- with capture.capture_output() as cap:
154
- get_ipython().system('git config --global user.email "[email protected]"')
155
- get_ipython().system('git config --global user.name "Your Name"')
156
-
157
- ## Update Webui
158
- if latest_webui:
159
- get_ipython().run_line_magic('cd', '{webui_path}')
160
- get_ipython().system('git restore .')
161
- get_ipython().system('git pull -X theirs --rebase --autostash')
162
-
163
- ## Update extensions
164
- if latest_exstensions:
165
- get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
166
- del cap
167
- print(f"\r✨ {action} Завершено!")
168
-
169
-
170
- # === FIXING EXTENSIONS ===
171
- anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
172
-
173
- with capture.capture_output() as cap:
174
- # --- Umi-Wildcard ---
175
- get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
176
-
177
- # --- Encrypt-Image ---
178
- get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")
179
-
180
- # --- Additional-Networks ---
181
- get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style')
182
- del cap
183
-
184
-
185
- ## Version switching
186
- if commit_hash:
187
- print('⏳ Активация машины времени...', end="", flush=True)
188
- with capture.capture_output() as cap:
189
- get_ipython().run_line_magic('cd', '{webui_path}')
190
- get_ipython().system('git config --global user.email "[email protected]"')
191
- get_ipython().system('git config --global user.name "Your Name"')
192
- get_ipython().system('git reset --hard {commit_hash}')
193
- del cap
194
- print(f"\r⌛️ Машина времени активированна! Текущий коммит: \033[34m{commit_hash}\033[0m")
195
-
196
-
197
- ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
198
- print("📦 Скачивание моделей и прочего...", end='')
199
- model_list = {
200
- "1.Anime (by XpucT) + INP": [
201
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
202
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
203
- ],
204
- "2.BluMix [Anime] [V7] + INP": [
205
- {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
206
- {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
207
- ],
208
- "3.Cetus-Mix [Anime] [V4] + INP": [
209
- {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
210
- {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
211
- ],
212
- "4.Counterfeit [Anime] [V3] + INP": [
213
- {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
214
- {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
215
- ],
216
- "5.CuteColor [Anime] [V3]": [
217
- {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
218
- ],
219
- "6.Dark-Sushi-Mix [Anime]": [
220
- {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
221
- {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
222
- ],
223
- "7.Deliberate [Realism] [V6] + INP": [
224
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
225
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
226
- ],
227
- "8.Meina-Mix [Anime] [V11] + INP": [
228
- {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
229
- {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
230
- ],
231
- "9.Mix-Pro [Anime] [V4] + INP": [
232
- {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
233
- {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
234
- ]
235
- }
236
-
237
- vae_list = {
238
- "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "vae-ft-mse-840000-ema-pruned.vae.safetensors"}],
239
- "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
240
- "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
241
- "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
242
- "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
243
- }
244
-
245
- controlnet_list = {
246
- "1.canny": [
247
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
248
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
249
- ],
250
- "2.openpose": [
251
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
252
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
253
- ],
254
- "3.depth": [
255
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
256
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
257
- {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
258
- ],
259
- "4.normal_map": [
260
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
261
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
262
- ],
263
- "5.mlsd": [
264
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
265
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
266
- ],
267
- "6.lineart": [
268
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
269
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
270
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
271
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
272
- ],
273
- "7.soft_edge": [
274
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
275
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
276
- ],
277
- "8.scribble": [
278
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
279
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
280
- ],
281
- "9.segmentation": [
282
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
283
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
284
- ],
285
- "10.shuffle": [
286
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
287
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
288
- ],
289
- "11.tile": [
290
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
291
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
292
- ],
293
- "12.inpaint": [
294
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
295
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
296
- ],
297
- "13.instruct_p2p": [
298
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
299
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
300
- ]
301
- }
302
-
303
- url = ""
304
- prefixes = {
305
- "model": models_dir,
306
- "vae": vaes_dir,
307
- "lora": loras_dir,
308
- "embed": embeddings_dir,
309
- "extension": extensions_dir,
310
- "control": control_dir,
311
- "adetailer": adetailer_dir,
312
- "config": webui_path
313
- }
314
-
315
- extension_repo = []
316
- directories = [value for key, value in prefixes.items()] # for unpacking zip files
317
- get_ipython().system('mkdir -p {" ".join(directories)}')
318
-
319
- hf_token = huggingface_token if huggingface_token else "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"
320
- user_header = f"\"Authorization: Bearer {hf_token}\""
321
-
322
- ''' Formatted Info Output '''
323
-
324
- from math import floor
325
-
326
- def center_text(text, terminal_width=45):
327
- text_length = len(text)
328
- left_padding = floor((terminal_width - text_length) / 2)
329
- right_padding = terminal_width - text_length - left_padding
330
- return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
331
-
332
- def format_output(url, dst_dir, file_name):
333
- info = f"[{file_name.split('.')[0]}]"
334
- info = center_text(info)
335
-
336
- print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
337
- print(f"\033[33mURL: \033[34m{url}")
338
- print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
339
- print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
340
-
341
- ''' GET CivitAi API - DATA '''
342
-
343
- def strip_(url, file_name=None):
344
- if 'github.com' in url:
345
- if '/blob/' in url:
346
- url = url.replace('/blob/', '/raw/')
347
-
348
- elif "civitai.com" in url:
349
- return CivitAi_API(url, file_name)
350
-
351
- elif "huggingface.co" in url:
352
- if '/blob/' in url:
353
- url = url.replace('/blob/', '/resolve/')
354
- if '?' in url:
355
- url = url.split('?')[0]
356
-
357
- return url
358
-
359
- def CivitAi_API(url, file_name=None):
360
- support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
361
- civitai_token = "62c0c5956b2f9defbd844d754000180b"
362
-
363
- if '?token=' in url:
364
- url = url.split('?token=')[0]
365
- if '?type=' in url:
366
- url = url.replace('?type=', f'?token={civitai_token}&type=')
367
- else:
368
- url = f"{url}?token={civitai_token}"
369
-
370
- # Determine model or version id
371
- if "civitai.com/models/" in url:
372
- if '?modelVersionId=' in url:
373
- version_id = url.split('?modelVersionId=')[1]
374
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
375
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
376
- else:
377
- model_id = url.split('/models/')[1].split('/')[0]
378
- response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
379
- # print(f"end - https://civitai.com/api/v1/models/{model_id}")
380
- else:
381
- version_id = url.split('/models/')[1].split('/')[0]
382
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
383
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
384
-
385
- data = response.json()
386
-
387
- if response.status_code != 200:
388
- return None, None, None, None, None, None, None
389
-
390
- # Define model type and name
391
- if "civitai.com/models/" in url:
392
- if '?modelVersionId=' in url:
393
- model_type = data['model']['type']
394
- model_name = data['files'][0]['name']
395
- else:
396
- model_type = data['type']
397
- model_name = data['modelVersions'][0]['files'][0]['name']
398
- elif 'type=' in url:
399
- model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
400
- if 'model' in model_type.lower():
401
- model_name = data['files'][0]['name']
402
- else:
403
- model_name = data['files'][1]['name']
404
- else:
405
- model_type = data['model']['type']
406
- model_name = data['files'][0]['name']
407
-
408
- model_name = file_name or model_name
409
-
410
- # Determine DownloadUrl
411
- if "civitai.com/models/" in url:
412
- if '?modelVersionId=' in url:
413
- download_url = data.get('downloadUrl')
414
- else:
415
- download_url = data["modelVersions"][0].get("downloadUrl", "")
416
- elif 'type=' in url:
417
- if any(t.lower() in model_type.lower() for t in support_types):
418
- download_url = data['files'][0]['downloadUrl']
419
- else:
420
- download_url = data['files'][1]['downloadUrl']
421
- else:
422
- download_url = data.get('downloadUrl')
423
-
424
- clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
425
-
426
- # Find a safe image: level less than 4 | Kaggle
427
- image_url, image_name = None, None
428
- if any(t in model_type for t in support_types):
429
- try:
430
- images = data.get('images') or data['modelVersions'][0].get('images', [])
431
- if env == 'Kaggle':
432
- image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
433
- else:
434
- image_url = images[0]['url'] if images else None
435
- except KeyError:
436
- pass
437
-
438
- # Generate a name to save the image
439
- image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
440
-
441
- return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
442
-
443
- ''' Main Download Code '''
444
-
445
- def download(url):
446
- links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
447
-
448
- for link_or_path in links_and_paths:
449
- if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
450
- handle_manual(link_or_path)
451
- else:
452
- url, dst_dir, file_name = link_or_path.split()
453
- manual_download(url, dst_dir, file_name)
454
-
455
- unpack_zip_files()
456
-
457
- def unpack_zip_files():
458
- for directory in directories:
459
- for root, _, files in os.walk(directory):
460
- for file in files:
461
- if file.endswith(".zip"):
462
- zip_path = os.path.join(root, file)
463
- extract_path = os.path.splitext(zip_path)[0]
464
- with zipfile.ZipFile(zip_path, 'r') as zip_ref:
465
- zip_ref.extractall(extract_path)
466
- os.remove(zip_path)
467
-
468
- def handle_manual(url):
469
- url_parts = url.split(':', 1)
470
- prefix, path = url_parts[0], url_parts[1]
471
-
472
- file_name_match = re.search(r'\[(.*?)\]', path)
473
- file_name = file_name_match.group(1) if file_name_match else None
474
- if file_name:
475
- path = re.sub(r'\[.*?\]', '', path)
476
-
477
- if prefix in prefixes:
478
- dir = prefixes[prefix]
479
- if prefix != "extension":
480
- try:
481
- manual_download(path, dir, file_name=file_name)
482
- except Exception as e:
483
- print(f"Error downloading file: {e}")
484
- else:
485
- extension_repo.append((path, file_name))
486
-
487
- def manual_download(url, dst_dir, file_name):
488
- aria2_args = '--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c'
489
- basename = url.split("/")[-1] if file_name is None else file_name
490
- header_option = f"--header={user_header}"
491
-
492
- if 'github.com' in url:
493
- url = strip_(url)
494
-
495
- # -- CivitAi APi+ V2 --
496
- elif 'civitai' in url:
497
- url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
498
-
499
- if image_url and image_name:
500
- with capture.capture_output() as cap:
501
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
502
- del cap
503
-
504
- elif "huggingface.co" in url:
505
- clean_url = strip_(url)
506
-
507
- """ Formatted info output """
508
- model_name_or_basename = file_name if not 'huggingface' in url else basename
509
- format_output(clean_url or url, dst_dir, model_name_or_basename)
510
-
511
- # ## -- for my tests --
512
- # print(url, dst_dir, model_name_or_basename)
513
- print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
514
- if 'civitai' in url and data and image_name:
515
- print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
516
- # =====================
517
-
518
- # # -- Git Hub --
519
- if 'github.com' in url or 'githubusercontent.com' in url:
520
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
521
-
522
- # -- GDrive --
523
- elif 'drive.google' in url:
524
- try:
525
- have_drive_link
526
- except:
527
- get_ipython().system('pip install -U gdown > /dev/null')
528
- have_drive_link = True
529
-
530
- if 'folders' in url:
531
- get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
532
- else:
533
- if file_name:
534
- get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
535
- else:
536
- get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
537
-
538
- # -- Hugging Face --
539
- elif 'huggingface' in url:
540
- get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
541
-
542
- # -- Other --
543
- elif 'http' in url:
544
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} '{'-o' + file_name if file_name else ''}' '{url}'")
545
-
546
- ''' SubModels - Added URLs '''
547
-
548
- def add_submodels(selection, num_selection, model_dict, dst_dir):
549
- if selection == "none":
550
- return []
551
- if selection == "ALL":
552
- all_models = []
553
- for models in model_dict.values():
554
- all_models.extend(models)
555
- selected_models = all_models
556
- else:
557
- selected_models = model_dict[selection]
558
- selected_nums = map(int, num_selection.replace(',', '').split())
559
- for num in selected_nums:
560
- if 1 <= num <= len(model_dict):
561
- name = list(model_dict)[num - 1]
562
- selected_models.extend(model_dict[name])
563
-
564
- unique_models = list({model['name']: model for model in selected_models}.values())
565
- for model in unique_models:
566
- model['dst_dir'] = dst_dir
567
-
568
- return unique_models
569
-
570
- def handle_submodels(selection, num_selection, model_dict, dst_dir, url):
571
- submodels = add_submodels(selection, num_selection, model_dict, dst_dir)
572
- for submodel in submodels:
573
- if not inpainting_model and "inpainting" in submodel['name']:
574
- continue
575
- url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
576
- return url
577
-
578
- url = handle_submodels(model, model_num, model_list, models_dir, url)
579
- url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)
580
- url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)
581
-
582
- ''' file.txt - added urls '''
583
-
584
- def process_file_download(file_url, prefixes, unique_urls):
585
- files_urls = ""
586
-
587
- if file_url.startswith("http"):
588
- if "blob" in file_url:
589
- file_url = file_url.replace("blob", "raw")
590
- response = requests.get(file_url)
591
- lines = response.text.split('\n')
592
- else:
593
- with open(file_url, 'r') as file:
594
- lines = file.readlines()
595
-
596
- current_tag = None
597
- for line in lines:
598
- line = line.strip()
599
- if any(f'# {tag}' in line.lower() for tag in prefixes):
600
- current_tag = next((tag for tag in prefixes if tag in line.lower()))
601
-
602
- urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
603
- for url in urls:
604
- filter_url = url.split('[')[0] # same url filter
605
-
606
- if url.startswith("http") and filter_url not in unique_urls:
607
- files_urls += f"{current_tag}:{url}, "
608
- unique_urls.add(filter_url)
609
-
610
- return files_urls
611
-
612
- file_urls = ""
613
- unique_urls = set()
614
-
615
- if custom_file_urls:
616
- for custom_file_url in custom_file_urls.replace(',', '').split():
617
- if not custom_file_url.endswith('.txt'):
618
- custom_file_url += '.txt'
619
- if not custom_file_url.startswith('http'):
620
- if not custom_file_url.startswith(root_path):
621
- custom_file_url = f'{root_path}/{custom_file_url}'
622
-
623
- try:
624
- file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
625
- except FileNotFoundError:
626
- pass
627
-
628
- # url prefixing
629
- urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
630
- prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())
631
- url += ", ".join(prefixed_urls) + ", " + file_urls
632
-
633
- if detailed_download == "on":
634
- print("\n\n\033[33m# ====== Подробная Загрузка ====== #\n\033[0m")
635
- download(url)
636
- print("\n\033[33m# =============================== #\n\033[0m")
637
- else:
638
- with capture.capture_output() as cap:
639
- download(url)
640
- del cap
641
-
642
- print("\r🏁 Ск��чивание Завершено!" + " "*15)
643
-
644
-
645
- # Cleaning shit after downloading...
646
- get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1')
647
-
648
-
649
- ## Install of Custom extensions
650
- if len(extension_repo) > 0:
651
- print("✨ Установка кастомных расширений...", end='', flush=True)
652
- with capture.capture_output() as cap:
653
- for repo, repo_name in extension_repo:
654
- if not repo_name:
655
- repo_name = repo.split('/')[-1]
656
- get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
657
- del cap
658
- print(f"\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!")
659
-
660
-
661
- ## List Models and stuff V2
662
- if detailed_download == "off":
663
- print("\n\n\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.")
664
-
665
- get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result')
666
-
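As a clarification of process_file_download() in this file: the custom .txt files it reads use comment lines with a tag (e.g. '# model', '# vae', '# lora') to select the destination directory, the URLs that follow are comma-separated, and entries already seen are skipped via unique_urls. An illustrative file might look like the sketch below; the URLs are placeholders only.

# model
https://example.com/checkpoints/ExampleModel.safetensors
# vae
https://example.com/vae/Example.vae.safetensors, https://example.com/vae/Other.vae.safetensors
# lora
https://example.com/lora/ExampleLora.safetensors[MyLora.safetensors]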
 
 
 
1
+ ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
+
3
+ import os
4
+ import re
5
+ import time
6
+ import json
7
+ import shutil
8
+ import zipfile
9
+ import requests
10
+ import subprocess
11
+ from datetime import timedelta
12
+ from subprocess import getoutput
13
+ from IPython.utils import capture
14
+ from IPython.display import clear_output
15
+ from urllib.parse import urlparse, parse_qs
16
+
17
+
18
+ # ================= DETECT ENV =================
19
+ def detect_environment():
20
+ free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
+ environments = {
22
+ 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
+ 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
+ }
25
+ for env_var, (environment, path) in environments.items():
26
+ if env_var in os.environ:
27
+ return environment, path, free_plan
28
+
29
+ env, root_path, free_plan = detect_environment()
30
+ webui_path = f"{root_path}/sdw"
31
+
32
+
33
+ # ================ LIBRARIES V2 ================
34
+ flag_file = f"{root_path}/libraries_installed.txt"
35
+
36
+ if not os.path.exists(flag_file):
37
+ print("💿 Установка библиотек, это займет какое-то время:\n")
38
+
39
+ install_lib = {
40
+ # "aria2": "apt -y install aria2",
41
+ "aria2": "pip install aria2",
42
+ "localtunnel": "npm install -g localtunnel",
43
+ "insightface": "pip install insightface"
44
+ }
45
+
46
+ additional_libs = {
47
+ "Google Colab": {
48
+ "xformers": "pip install xformers==0.0.27 --no-deps"
49
+ },
50
+ "Kaggle": {
51
+ "xformers": "pip install xformers==0.0.26.post1",
52
+ # "torch": "pip install torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121",
53
+ # "aiohttp": "pip install trash-cli && trash-put /opt/conda/lib/python3.10/site-packages/aiohttp*" # fix install req
54
+ }
55
+ }
56
+
57
+ if env in additional_libs:
58
+ install_lib.update(additional_libs[env])
59
+
60
+ # Loop through libraries
61
+ for index, (package, install_cmd) in enumerate(install_lib.items(), start=1):
62
+ print(f"\r[{index}/{len(install_lib)}] \033[32m>>\033[0m Installing \033[33m{package}\033[0m..." + " "*35, end='')
63
+ subprocess.run(install_cmd, shell=True, capture_output=True)
64
+
65
+ # Additional specific packages
66
+ with capture.capture_output() as cap:
67
+ get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
68
+ get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
69
+ get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
70
+ del cap
71
+
72
+ clear_output()
73
+
74
+ # Save file install lib
75
+ with open(flag_file, "w") as f:
76
+ f.write(">W<'")
77
+
78
+ print("🍪 Библиотеки установлены!" + " "*35)
79
+ time.sleep(2)
80
+ clear_output()
81
+
82
+
83
+ # ================= loading settings V4 =================
84
+ def load_settings(path):
85
+ if os.path.exists(path):
86
+ with open(path, 'r') as file:
87
+ return json.load(file)
88
+ return {}
89
+
90
+ settings = load_settings(f'{root_path}/settings.json')
91
+
92
+ VARIABLES = [
93
+ 'model', 'model_num', 'inpainting_model',
94
+ 'vae', 'vae_num', 'latest_webui', 'latest_exstensions',
95
+ 'change_webui', 'detailed_download', 'controlnet',
96
+ 'controlnet_num', 'commit_hash', 'huggingface_token',
97
+ 'ngrok_token', 'zrok_token', 'commandline_arguments',
98
+ 'Model_url', 'Vae_url', 'LoRA_url', 'Embedding_url',
99
+ 'Extensions_url', 'custom_file_urls'
100
+ ]
101
+
102
+ locals().update({key: settings.get(key) for key in VARIABLES})
103
+
104
+
105
+ # ================= OTHER =================
106
+ try:
107
+ start_colab
108
+ except:
109
+ start_colab = int(time.time())-5
110
+
111
+ # CONFIG DIR
112
+ models_dir = f"{webui_path}/models/Stable-diffusion"
113
+ vaes_dir = f"{webui_path}/models/VAE"
114
+ embeddings_dir = f"{webui_path}/embeddings"
115
+ loras_dir = f"{webui_path}/models/Lora"
116
+ extensions_dir = f"{webui_path}/extensions"
117
+ control_dir = f"{webui_path}/models/ControlNet"
118
+ adetailer_dir = f"{webui_path}/models/adetailer"
119
+
120
+
121
+ # ================= MAIN CODE =================
122
+ if not os.path.exists(webui_path):
123
+ start_install = int(time.time())
124
+ print("⌚ Распаковка Stable Diffusion..." if change_webui != 'Forge' else "⌚ Распаковка Stable Diffusion (Forge)...", end='')
125
+ with capture.capture_output() as cap:
126
+ aria2_command = "aria2c --console-log-level=error -c -x 16 -s 16 -k 1M"
127
+ url = "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO.zip" if change_webui != 'Forge' else "https://huggingface.co/NagisaNao/fast_repo/resolve/main/FULL_REPO_forge.zip"
128
+ get_ipython().system('{aria2_command} {url} -o repo.zip')
129
+
130
+ get_ipython().system('unzip -q -o repo.zip -d {webui_path}')
131
+ get_ipython().system('rm -rf repo.zip')
132
+
133
+ get_ipython().run_line_magic('cd', '{root_path}')
134
+ os.environ["SAFETENSORS_FAST_GPU"]='1'
135
+ os.environ["CUDA_MODULE_LOADING"]="LAZY"
136
+ os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
137
+ os.environ["PYTHONWARNINGS"] = "ignore"
138
+
139
+ get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
140
+ del cap
141
+ install_time = timedelta(seconds=time.time()-start_install)
142
+ print("\r🚀 Распаковка Завершена! За","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
143
+ else:
144
+ print("🚀 Все распакованно... Пропуск. ⚡")
145
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
146
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
147
+ print(f"⌚️ Вы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m")
148
+
149
+
150
+ ## Changes extensions and WebUi
151
+ if latest_webui or latest_exstensions:
152
+ action = "Обновление WebUI и Расширений" if latest_webui and latest_exstensions else ("Обновление WebUI" if latest_webui else "Обновление Расширений")
153
+ print(f"⌚️ {action}...", end='', flush=True)
154
+ with capture.capture_output() as cap:
155
+ get_ipython().system('git config --global user.email "[email protected]"')
156
+ get_ipython().system('git config --global user.name "Your Name"')
157
+
158
+ ## Update Webui
159
+ if latest_webui:
160
+ get_ipython().run_line_magic('cd', '{webui_path}')
161
+ get_ipython().system('git restore .')
162
+ get_ipython().system('git pull -X theirs --rebase --autostash')
163
+
164
+ ## Update extensions
165
+ if latest_exstensions:
166
+ get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
167
+ del cap
168
+ print(f"\r✨ {action} Завершено!")
169
+
170
+
171
+ # === FIXING EXTENSIONS ===
172
+ anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
173
+
174
+ with capture.capture_output() as cap:
175
+ # --- Umi-Wildcard ---
176
+ get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
177
+
178
+ # --- Encrypt-Image ---
179
+ get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")
180
+
181
+ # --- Additional-Networks ---
182
+ get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style')
183
+ del cap
184
+
185
+
186
+ ## Version switching
187
+ if commit_hash:
188
+ print('⏳ Активация машины времени...', end="", flush=True)
189
+ with capture.capture_output() as cap:
190
+ get_ipython().run_line_magic('cd', '{webui_path}')
191
+ get_ipython().system('git config --global user.email "[email protected]"')
192
+ get_ipython().system('git config --global user.name "Your Name"')
193
+ get_ipython().system('git reset --hard {commit_hash}')
194
+ del cap
195
+ print(f"\r⌛️ Машина времени активированна! Текущий коммит: \033[34m{commit_hash}\033[0m")
196
+
197
+
198
+ ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
199
+ print("📦 Скачивание моделей и прочего...", end='')
200
+ model_list = {
201
+ "1.Anime (by XpucT) + INP": [
202
+ {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
203
+ {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
204
+ ],
205
+ "2.BluMix [Anime] [V7] + INP": [
206
+ {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
207
+ {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
208
+ ],
209
+ "3.Cetus-Mix [Anime] [V4] + INP": [
210
+ {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
211
+ {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
212
+ ],
213
+ "4.Counterfeit [Anime] [V3] + INP": [
214
+ {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
215
+ {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
216
+ ],
217
+ "5.CuteColor [Anime] [V3]": [
218
+ {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
219
+ ],
220
+ "6.Dark-Sushi-Mix [Anime]": [
221
+ {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
222
+ {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
223
+ ],
224
+ "7.Deliberate [Realism] [V6] + INP": [
225
+ {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
226
+ {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
227
+ ],
228
+ "8.Meina-Mix [Anime] [V11] + INP": [
229
+ {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
230
+ {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
231
+ ],
232
+ "9.Mix-Pro [Anime] [V4] + INP": [
233
+ {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
234
+ {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
235
+ ]
236
+ }
237
+
238
+ vae_list = {
239
+ "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "Anime.vae.safetensors"}],
240
+ "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
241
+ "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
242
+ "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
243
+ "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
244
+ }
245
+
246
+ controlnet_list = {
247
+ "1.canny": [
248
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
249
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
250
+ ],
251
+ "2.openpose": [
252
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
253
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
254
+ ],
255
+ "3.depth": [
256
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
257
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
258
+ {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
259
+ ],
260
+ "4.normal_map": [
261
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
262
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
263
+ ],
264
+ "5.mlsd": [
265
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
266
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
267
+ ],
268
+ "6.lineart": [
269
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
270
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
271
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
272
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
273
+ ],
274
+ "7.soft_edge": [
275
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
276
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
277
+ ],
278
+ "8.scribble": [
279
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
280
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
281
+ ],
282
+ "9.segmentation": [
283
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
284
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
285
+ ],
286
+ "10.shuffle": [
287
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
288
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
289
+ ],
290
+ "11.tile": [
291
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
292
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
293
+ ],
294
+ "12.inpaint": [
295
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
296
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
297
+ ],
298
+ "13.instruct_p2p": [
299
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
300
+ {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
301
+ ]
302
+ }
303
+
304
+ url = ""
305
+ prefixes = {
306
+ "model": models_dir,
307
+ "vae": vaes_dir,
308
+ "lora": loras_dir,
309
+ "embed": embeddings_dir,
310
+ "extension": extensions_dir,
311
+ "control": control_dir,
312
+ "adetailer": adetailer_dir,
313
+ "config": webui_path
314
+ }
315
+
316
+ extension_repo = []
317
+ directories = [value for key, value in prefixes.items()] # for unpacking zip files
318
+ get_ipython().system('mkdir -p {" ".join(directories)}')
319
+
320
+ hf_token = huggingface_token if huggingface_token else "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"
321
+ user_header = f"\"Authorization: Bearer {hf_token}\""
322
+
323
+ ''' Formatted Info Output '''
324
+
325
+ from math import floor
326
+
327
+ def center_text(text, terminal_width=45):
328
+ text_length = len(text)
329
+ left_padding = floor((terminal_width - text_length) / 2)
330
+ right_padding = terminal_width - text_length - left_padding
331
+ return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
332
+
333
+ def format_output(url, dst_dir, file_name):
334
+ info = f"[{file_name.split('.')[0]}]"
335
+ info = center_text(info)
336
+
337
+ print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
338
+ print(f"\033[33mURL: \033[34m{url}")
339
+ print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
340
+ print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
341
+
342
+ ''' GET CivitAi API - DATA '''
343
+
344
+ def strip_(url, file_name=None):
345
+ if 'github.com' in url:
346
+ if '/blob/' in url:
347
+ url = url.replace('/blob/', '/raw/')
348
+
349
+ elif "civitai.com" in url:
350
+ return CivitAi_API(url, file_name)
351
+
352
+ elif "huggingface.co" in url:
353
+ if '/blob/' in url:
354
+ url = url.replace('/blob/', '/resolve/')
355
+ if '?' in url:
356
+ url = url.split('?')[0]
357
+
358
+ return url
359
+
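In practice, strip_ just rewrites viewer links into direct-download links before anything is fetched. A minimal standalone sketch of the same rules, using placeholder URLs:

def normalize(url):
    # GitHub: turn a blob view into a raw link
    if 'github.com' in url and '/blob/' in url:
        return url.replace('/blob/', '/raw/')
    # Hugging Face: turn a blob view into a resolve link and drop query params
    if 'huggingface.co' in url:
        if '/blob/' in url:
            url = url.replace('/blob/', '/resolve/')
        return url.split('?')[0]
    return url

print(normalize("https://huggingface.co/org/repo/blob/main/model.safetensors?download=true"))
# -> https://huggingface.co/org/repo/resolve/main/model.safetensors
print(normalize("https://github.com/user/repo/blob/main/script.py"))
# -> https://github.com/user/repo/raw/main/script.py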
360
+ def CivitAi_API(url, file_name=None):
361
+ support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
362
+ civitai_token = "62c0c5956b2f9defbd844d754000180b"
363
+
364
+ if '?token=' in url:
365
+ url = url.split('?token=')[0]
366
+ if '?type=' in url:
367
+ url = url.replace('?type=', f'?token={civitai_token}&type=')
368
+ else:
369
+ url = f"{url}?token={civitai_token}"
370
+
371
+ # Determine model or version id
372
+ if "civitai.com/models/" in url:
373
+ if '?modelVersionId=' in url:
374
+ version_id = url.split('?modelVersionId=')[1]
375
+ response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
376
+ # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
377
+ else:
378
+ model_id = url.split('/models/')[1].split('/')[0]
379
+ response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
380
+ # print(f"end - https://civitai.com/api/v1/models/{model_id}")
381
+ else:
382
+ version_id = url.split('/models/')[1].split('/')[0]
383
+ response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
384
+ # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
385
+
386
+ if response.status_code != 200:
387
+ return None, None, None, None, None, None, None
388
+
389
+ data = response.json()
390
+
391
+ # Define model type and name
392
+ if "civitai.com/models/" in url:
393
+ if '?modelVersionId=' in url:
394
+ model_type = data['model']['type']
395
+ model_name = data['files'][0]['name']
396
+ else:
397
+ model_type = data['type']
398
+ model_name = data['modelVersions'][0]['files'][0]['name']
399
+ elif 'type=' in url:
400
+ model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
401
+ if 'model' in model_type.lower():
402
+ model_name = data['files'][0]['name']
403
+ else:
404
+ model_name = data['files'][1]['name']
405
+ else:
406
+ model_type = data['model']['type']
407
+ model_name = data['files'][0]['name']
408
+
409
+ model_name = file_name or model_name
410
+
411
+ # Determine DownloadUrl
412
+ if "civitai.com/models/" in url:
413
+ if '?modelVersionId=' in url:
414
+ download_url = data.get('downloadUrl')
415
+ else:
416
+ download_url = data["modelVersions"][0].get("downloadUrl", "")
417
+ elif 'type=' in url:
418
+ if any(t.lower() in model_type.lower() for t in support_types):
419
+ download_url = data['files'][0]['downloadUrl']
420
+ else:
421
+ download_url = data['files'][1]['downloadUrl']
422
+ else:
423
+ download_url = data.get('downloadUrl')
424
+
425
+ clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
426
+
427
+ # Find a safe image: level less than 4 | Kaggle
428
+ image_url, image_name = None, None
429
+ if any(t in model_type for t in support_types):
430
+ try:
431
+ images = data.get('images') or data['modelVersions'][0].get('images', [])
432
+ if env == 'Kaggle':
433
+ image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
434
+ else:
435
+ image_url = images[0]['url'] if images else None
436
+ except KeyError:
437
+ pass
438
+
439
+ # Generate a name to save the image
440
+ image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
441
+
442
+ return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
443
+
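The branching above reduces to choosing between two CivitAI REST endpoints. A standalone sketch with made-up IDs (no request is made; the example URLs are placeholders):

def civitai_endpoint(url):
    # model page with an explicit version -> model-versions endpoint
    if "civitai.com/models/" in url and '?modelVersionId=' in url:
        version_id = url.split('?modelVersionId=')[1]
        return f"https://civitai.com/api/v1/model-versions/{version_id}"
    # plain model page -> models endpoint
    if "civitai.com/models/" in url:
        model_id = url.split('/models/')[1].split('/')[0]
        return f"https://civitai.com/api/v1/models/{model_id}"
    # anything else (e.g. an api/download/models/<id> link) -> model-versions endpoint
    version_id = url.split('/models/')[1].split('/')[0]
    return f"https://civitai.com/api/v1/model-versions/{version_id}"

print(civitai_endpoint("https://civitai.com/models/12345"))                       # .../api/v1/models/12345
print(civitai_endpoint("https://civitai.com/models/12345?modelVersionId=67890"))  # .../api/v1/model-versions/67890
print(civitai_endpoint("https://civitai.com/api/download/models/67890"))          # .../api/v1/model-versions/67890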
444
+ ''' Main Download Code '''
445
+
446
+ def download(url):
447
+ links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
448
+
449
+ for link_or_path in links_and_paths:
450
+ if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
451
+ handle_manual(link_or_path)
452
+ else:
453
+ url, dst_dir, file_name = link_or_path.split()
454
+ manual_download(url, dst_dir, file_name)
455
+
456
+ unpack_zip_files()
457
+
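The url string handed to download() is a comma-separated mix of prefixed entries and bare "URL dst_dir file_name" triples, roughly like this (placeholder URLs and paths):

url = ("model:https://example.com/modelA.safetensors[renamed.safetensors], "
       "vae:https://example.com/someVae.safetensors, "
       "https://example.com/embed.pt /content/sdw/embeddings embed.pt")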
458
+ def unpack_zip_files():
459
+ for directory in directories:
460
+ for root, _, files in os.walk(directory):
461
+ for file in files:
462
+ if file.endswith(".zip"):
463
+ zip_path = os.path.join(root, file)
464
+ extract_path = os.path.splitext(zip_path)[0]
465
+ with zipfile.ZipFile(zip_path, 'r') as zip_ref:
466
+ zip_ref.extractall(extract_path)
467
+ os.remove(zip_path)
468
+
469
+ def handle_manual(url):
470
+ url_parts = url.split(':', 1)
471
+ prefix, path = url_parts[0], url_parts[1]
472
+
473
+ file_name_match = re.search(r'\[(.*?)\]', path)
474
+ file_name = file_name_match.group(1) if file_name_match else None
475
+ if file_name:
476
+ path = re.sub(r'\[.*?\]', '', path)
477
+
478
+ if prefix in prefixes:
479
+ dir = prefixes[prefix]
480
+ if prefix != "extension":
481
+ try:
482
+ manual_download(path, dir, file_name=file_name)
483
+ except Exception as e:
484
+ print(f"Error downloading file: {e}")
485
+ else:
486
+ extension_repo.append((path, file_name))
487
+
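A self-contained sketch of how a prefixed entry of the form prefix:URL[custom_name] is pulled apart; the directory map below is illustrative, not the notebook's real paths:

import re

prefixes = {"model": "/content/sdw/models/Stable-diffusion", "lora": "/content/sdw/models/Lora"}  # illustrative

def parse_entry(entry):
    prefix, path = entry.split(':', 1)
    match = re.search(r'\[(.*?)\]', path)   # optional [custom_file_name]
    file_name = match.group(1) if match else None
    path = re.sub(r'\[.*?\]', '', path)     # drop the [..] part from the URL
    return prefix, path, prefixes.get(prefix), file_name

print(parse_entry("model:https://example.com/file.safetensors[my_model.safetensors]"))
# -> ('model', 'https://example.com/file.safetensors', '/content/sdw/models/Stable-diffusion', 'my_model.safetensors')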
488
+ def manual_download(url, dst_dir, file_name):
489
+ header_option = f"--header={user_header}"
490
+ aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
491
+ aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
492
+
493
+ if 'github.com' in url:
494
+ url = strip_(url)
495
+
496
+ # -- CivitAi APi+ V2 --
497
+ elif 'civitai' in url:
498
+ url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
499
+ if data is None: # API call failed - nothing to download, bail out before the URL checks below
+ print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n")
+ return
+
500
+ if image_url and image_name:
501
+ with capture.capture_output() as cap:
502
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
503
+ del cap
504
+
505
+ elif "huggingface.co" in url:
506
+ clean_url = strip_(url)
507
+ basename = clean_url.split("/")[-1] if file_name is None else file_name
508
+
509
+ """ Formatted info output """
510
+ model_name_or_basename = file_name if 'huggingface' not in url else basename
511
+ format_output(clean_url or url, dst_dir, model_name_or_basename)
512
+
513
+ # ## -- for my tests --
514
+ # print(url, dst_dir, model_name_or_basename)
515
+ print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
516
+ if 'civitai' in url and data and image_name:
517
+ print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
518
+ # =====================
519
+
520
+ # -- Git Hub --
521
+ if 'github.com' in url or 'githubusercontent.com' in url:
522
+ get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
523
+
524
+ # -- GDrive --
525
+ elif 'drive.google' in url:
526
+ try:
527
+ have_drive_link
528
+ except:
529
+ get_ipython().system('pip install -q gdown==5.2.0 > /dev/null')
530
+ have_drive_link = True
531
+
532
+ if 'folders' in url:
533
+ get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
534
+ else:
535
+ if file_name:
536
+ get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
537
+ else:
538
+ get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
539
+
540
+ # -- Hugging Face --
541
+ elif 'huggingface' in url:
542
+ get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
543
+
544
+ # -- Other --
545
+ elif 'http' in url:
546
+ get_ipython().system('aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o "{file_name if file_name else \'\'}" \'{url}\'')
547
+
548
+ ''' SubModels - Added URLs '''
549
+
550
+ def add_submodels(selection, num_selection, model_dict, dst_dir):
551
+ if selection == "none":
552
+ return []
553
+ if selection == "ALL":
554
+ all_models = []
555
+ for models in model_dict.values():
556
+ all_models.extend(models)
557
+ selected_models = all_models
558
+ else:
559
+ selected_models = model_dict[selection]
560
+ selected_nums = map(int, num_selection.replace(',', ' ').split())
561
+ for num in selected_nums:
562
+ if 1 <= num <= len(model_dict):
563
+ name = list(model_dict)[num - 1]
564
+ selected_models.extend(model_dict[name])
565
+
566
+ unique_models = list({model['name']: model for model in selected_models}.values())
567
+ for model in unique_models:
568
+ model['dst_dir'] = dst_dir
569
+
570
+ return unique_models
571
+
572
+ def handle_submodels(selection, num_selection, model_dict, dst_dir, url):
573
+ submodels = add_submodels(selection, num_selection, model_dict, dst_dir)
574
+ for submodel in submodels:
575
+ if not inpainting_model and "inpainting" in submodel['name']:
576
+ continue
577
+ url += f"{submodel['url']} {submodel['dst_dir']} {submodel['name']}, "
578
+ return url
579
+
580
+ url = handle_submodels(model, model_num, model_list, models_dir, url)
581
+ url = handle_submodels(vae, vae_num, vae_list, vaes_dir, url)
582
+ url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir, url)
583
+
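Condensed, the selection logic behaves like this self-contained sketch (dummy catalogue; the real model_list, vae_list and controlnet_list come from an earlier cell):

model_dict = {
    "1.Example-A": [{"url": "https://example.com/a.safetensors", "name": "a.safetensors"}],
    "2.Example-B": [{"url": "https://example.com/b.safetensors", "name": "b.safetensors"}],
}

def pick(selection, num_selection, model_dict, dst_dir):
    if selection == "none":
        return []
    if selection == "ALL":
        selected = [m for group in model_dict.values() for m in group]
    else:
        selected = list(model_dict.get(selection, []))
        # extra numbered picks, e.g. "2" or "1, 2"
        for num in map(int, num_selection.replace(',', ' ').split()):
            if 1 <= num <= len(model_dict):
                selected.extend(model_dict[list(model_dict)[num - 1]])
    unique = list({m['name']: m for m in selected}.values())   # dedupe by file name
    for m in unique:
        m['dst_dir'] = dst_dir
    return unique

print(pick("1.Example-A", "2", model_dict, "/content/models"))
# -> both entries, each tagged with the destination directory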
584
+ ''' file.txt - added urls '''
585
+
586
+ def process_file_download(file_url, prefixes, unique_urls):
587
+ files_urls = ""
588
+
589
+ if file_url.startswith("http"):
590
+ if "blob" in file_url:
591
+ file_url = file_url.replace("blob", "raw")
592
+ response = requests.get(file_url)
593
+ lines = response.text.split('\n')
594
+ else:
595
+ with open(file_url, 'r') as file:
596
+ lines = file.readlines()
597
+
598
+ current_tag = None
599
+ for line in lines:
600
+ line = line.strip()
601
+ if any(f'# {tag}' in line.lower() for tag in prefixes):
602
+ current_tag = next((tag for tag in prefixes if tag in line.lower()))
603
+
604
+ urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
605
+ for url in urls:
606
+ filter_url = url.split('[')[0] # same url filter
607
+
608
+ if url.startswith("http") and filter_url not in unique_urls:
609
+ files_urls += f"{current_tag}:{url}, "
610
+ unique_urls.add(filter_url)
611
+
612
+ return files_urls
613
+
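Such a .txt file is expected to look roughly like the sample below (hypothetical URLs): each '# tag' line switches the current prefix, and the comma-separated URLs after it are queued under that prefix.

sample_txt = """\
# model
https://example.com/modelA.safetensors, https://example.com/modelB.safetensors[renamed.safetensors]

# vae
https://example.com/someVae.safetensors
"""
# process_file_download() would turn this roughly into:
# "model:https://example.com/modelA.safetensors, model:https://example.com/modelB.safetensors[renamed.safetensors], vae:https://example.com/someVae.safetensors, "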
614
+ file_urls = ""
615
+ unique_urls = set()
616
+
617
+ if custom_file_urls:
618
+ for custom_file_url in custom_file_urls.replace(',', ' ').split():
619
+ if not custom_file_url.endswith('.txt'):
620
+ custom_file_url += '.txt'
621
+ if not custom_file_url.startswith('http'):
622
+ if not custom_file_url.startswith(root_path):
623
+ custom_file_url = f'{root_path}/{custom_file_url}'
624
+
625
+ try:
626
+ file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
627
+ except FileNotFoundError:
628
+ pass
629
+
630
+ # url prefixing
631
+ urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
632
+ prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', ' ').split())
633
+ url += ", ".join(prefixed_urls) + ", " + file_urls
634
+
635
+ if detailed_download == "on":
636
+ print("\n\n\033[33m# ====== Подробная Загрузка ====== #\n\033[0m")
637
+ download(url)
638
+ print("\n\033[33m# =============================== #\n\033[0m")
639
+ else:
640
+ with capture.capture_output() as cap:
641
+ download(url)
642
+ del cap
643
+
644
+ print("\r🏁 Скачивание Завершено!" + " "*15)
645
+
646
+
647
+ # Clean up leftover service files after downloading...
648
+ get_ipython().system('find {webui_path} \\( -type d \\( -name ".ipynb_checkpoints" -o -name ".aria2" \\) -o -type f -name "*.aria2" \\) -exec rm -r {{}} \\; >/dev/null 2>&1')
649
+
650
+
651
+ ## Install of Custom extensions
652
+ if len(extension_repo) > 0:
653
+ print("✨ Установка кастомных расширений...", end='', flush=True)
654
+ with capture.capture_output() as cap:
655
+ for repo, repo_name in extension_repo:
656
+ if not repo_name:
657
+ repo_name = repo.split('/')[-1]
658
+ get_ipython().system('cd {extensions_dir} && git clone {repo} {repo_name} && cd {repo_name} && git fetch')
659
+ del cap
660
+ print(f"\r📦 Установлено '{len(extension_repo)}', Кастомных расширений!")
661
+
662
+
663
+ ## List Models and stuff V2
664
+ if detailed_download == "off":
665
+ print("\n\n\033[33mЕсли вы не видете каких-то скаченных файлов, включите в виджетах функцию 'Подробная Загрузка'.")
666
+
667
+ get_ipython().run_line_magic('run', '{root_path}/file_cell/special/dl_display_results.py # display widgets result')
668
+
files_cells/python/ru/launch_ru.py CHANGED
@@ -1,118 +1,118 @@
1
- ##~ LAUNCH CODE | BY: ANXETY ~##
2
-
3
- import os
4
- import re
5
- import time
6
- import json
7
- import requests
8
- import cloudpickle as pickle
9
- from datetime import timedelta
10
- from IPython.display import clear_output
11
-
12
- # ================= DETECT ENV =================
13
- def detect_environment():
14
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
- environments = {
16
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
- }
19
-
20
- for env_var, (environment, path) in environments.items():
21
- if env_var in os.environ:
22
- return environment, path, free_plan
23
- return 'Unknown', '/unknown/path', free_plan
24
-
25
- env, root_path, free_plan = detect_environment()
26
- webui_path = f"{root_path}/sdw"
27
-
28
- def load_settings():
29
- SETTINGS_FILE = f'{root_path}/settings.json'
30
- if os.path.exists(SETTINGS_FILE):
31
- with open(SETTINGS_FILE, 'r') as f:
32
- return json.load(f)
33
- return {}
34
-
35
- settings = load_settings()
36
- ngrok_token = settings.get('ngrok_token', "")
37
- zrok_token = settings.get('zrok_token', "")
38
- commandline_arguments = settings.get('commandline_arguments', "")
39
- change_webui = settings.get('change_webui', "")
40
-
41
- # ======================== TUNNEL V2 ========================
42
- print('Please Wait...')
43
-
44
- def get_public_ip(version='ipv4'):
45
- try:
46
- url = f'https://api64.ipify.org?format=json&{version}=true'
47
- response = requests.get(url)
48
- return response.json().get('ip', 'N/A')
49
- except Exception as e:
50
- print(f"Error getting public {version} address:", e)
51
-
52
- # Check if public IP is already saved, if not then get it
53
- public_ip_file = f"{root_path}/public_ip.txt"
54
- if os.path.exists(public_ip_file):
55
- with open(public_ip_file, 'r') as file:
56
- public_ipv4 = file.read().strip()
57
- else:
58
- public_ipv4 = get_public_ip(version='ipv4')
59
- with open(public_ip_file, 'w') as file:
60
- file.write(public_ipv4)
61
-
62
- tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
63
- tunnel_port = 1834
64
- tunnel = tunnel_class(tunnel_port)
65
- tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
66
- tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
67
-
68
- if zrok_token:
69
- get_ipython().system('zrok enable {zrok_token} &> /dev/null')
70
- tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
71
-
72
- clear_output()
73
-
74
- # =============== Automatic Fixing Path V3 ===============
75
- paths_to_check = {
76
- "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
77
- "additional_networks_extra_lora_path": f"{webui_path}/models/Lora/",
78
- "ad_extra_models_dir": f"{webui_path}/models/adetailer/",
79
- "sd_checkpoint_hash": "",
80
- "sd_model_checkpoint": "",
81
- "sd_vae": "None"
82
- }
83
-
84
- config_path = f'{webui_path}/ui-config.json'
85
-
86
- if os.path.exists(config_path):
87
- with open(config_path, 'r') as file:
88
- config_data = json.load(file)
89
-
90
- for key, value in paths_to_check.items():
91
- if key in config_data and config_data[key] != value:
92
- sed_command = f"sed -i 's|\"{key}\": \".*\"|\"{key}\": \"{value}\"|' {config_path}"
93
- os.system(sed_command)
94
-
95
- if env == 'Kaggle':
96
- get_ipython().system('sed -i \'s/"civitai_interface\\/NSFW content\\/value":.*/"civitai_interface\\/NSFW content\\/value": false/g\' {webui_path}/ui-config.json')
97
-
98
- with tunnel:
99
- get_ipython().run_line_magic('cd', '{webui_path}')
100
-
101
- commandline_arguments += f' --port={tunnel_port}'
102
- if ngrok_token:
103
- commandline_arguments += f' --ngrok {ngrok_token}'
104
- if env != "Google Colab":
105
- commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
106
-
107
- if change_webui == 'Forge':
108
- commandline_arguments += ' --cuda-stream --pin-shared-memory'
109
-
110
- get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
111
-
112
- start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
113
- time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
114
- print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
115
-
116
- if zrok_token:
117
- get_ipython().system('zrok disable &> /dev/null')
118
-
 
1
+ ##~ LAUNCH CODE | BY: ANXETY ~##
2
+
3
+ import os
4
+ import re
5
+ import time
6
+ import json
7
+ import requests
8
+ import cloudpickle as pickle
9
+ from datetime import timedelta
10
+ from IPython.display import clear_output
11
+
12
+ # ================= DETECT ENV =================
13
+ def detect_environment():
14
+ free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
+ environments = {
16
+ 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
+ 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
+ }
19
+
20
+ for env_var, (environment, path) in environments.items():
21
+ if env_var in os.environ:
22
+ return environment, path, free_plan
23
+ return 'Unknown', '/unknown/path', free_plan
24
+
25
+ env, root_path, free_plan = detect_environment()
26
+ webui_path = f"{root_path}/sdw"
27
+
28
+ def load_settings():
29
+ SETTINGS_FILE = f'{root_path}/settings.json'
30
+ if os.path.exists(SETTINGS_FILE):
31
+ with open(SETTINGS_FILE, 'r') as f:
32
+ return json.load(f)
33
+ return {}
34
+
35
+ settings = load_settings()
36
+ ngrok_token = settings.get('ngrok_token', "")
37
+ zrok_token = settings.get('zrok_token', "")
38
+ commandline_arguments = settings.get('commandline_arguments', "")
39
+ change_webui = settings.get('change_webui', "")
40
+
41
+ # ======================== TUNNEL V2 ========================
42
+ print('Please Wait...')
43
+
44
+ def get_public_ip(version='ipv4'):
45
+ try:
46
+ url = f'https://api64.ipify.org?format=json&{version}=true'
47
+ response = requests.get(url)
48
+ return response.json().get('ip', 'N/A')
49
+ except Exception as e:
50
+ print(f"Error getting public {version} address:", e)
51
+
52
+ # Check if public IP is already saved, if not then get it
53
+ public_ip_file = f"{root_path}/public_ip.txt"
54
+ if os.path.exists(public_ip_file):
55
+ with open(public_ip_file, 'r') as file:
56
+ public_ipv4 = file.read().strip()
57
+ else:
58
+ public_ipv4 = get_public_ip(version='ipv4')
59
+ with open(public_ip_file, 'w') as file:
60
+ file.write(public_ipv4)
61
+
62
+ tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
63
+ tunnel_port = 1834
64
+ tunnel = tunnel_class(tunnel_port)
65
+ tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
66
+ tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
67
+
68
+ if zrok_token:
69
+ get_ipython().system('zrok enable {zrok_token} &> /dev/null')
70
+ tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
71
+
72
+ clear_output()
73
+
74
+ # =============== Automatic Fixing Path V3 ===============
75
+ paths_to_check = {
76
+ "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
77
+ "additional_networks_extra_lora_path": f"{webui_path}/models/Lora/",
78
+ "ad_extra_models_dir": f"{webui_path}/models/adetailer/",
79
+ "sd_checkpoint_hash": "",
80
+ "sd_model_checkpoint": "",
81
+ "sd_vae": "None"
82
+ }
83
+
84
+ config_path = f'{webui_path}/config.json'
85
+
86
+ if os.path.exists(config_path):
87
+ with open(config_path, 'r') as file:
88
+ config_data = json.load(file)
89
+
90
+ for key, value in paths_to_check.items():
91
+ if key in config_data and config_data[key] != value:
92
+ sed_command = f"sed -i 's|\"{key}\": \".*\"|\"{key}\": \"{value}\"|' {config_path}"
93
+ os.system(sed_command)
94
+
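For a single key the generated command expands roughly as follows (the paths are illustrative):

key, value = "ad_extra_models_dir", "/content/sdw/models/adetailer/"
config_path = "/content/sdw/config.json"   # illustrative
print(f"sed -i 's|\"{key}\": \".*\"|\"{key}\": \"{value}\"|' {config_path}")
# -> sed -i 's|"ad_extra_models_dir": ".*"|"ad_extra_models_dir": "/content/sdw/models/adetailer/"|' /content/sdw/config.json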
95
+ if env == 'Kaggle':
96
+ get_ipython().system('sed -i \'s|"civitai_interface NSFW content":.*|"civitai_interface NSFW content": false,|\' {webui_path}/ui-config.json')
97
+
98
+ with tunnel:
99
+ get_ipython().run_line_magic('cd', '{webui_path}')
100
+
101
+ commandline_arguments += f' --port={tunnel_port}'
102
+ if ngrok_token:
103
+ commandline_arguments += f' --ngrok {ngrok_token}'
104
+ if env != "Google Colab":
105
+ commandline_arguments += f' --encrypt-pass={tunnel_port} --api'
106
+
107
+ if change_webui == 'Forge':
108
+ commandline_arguments += ' --cuda-stream --pin-shared-memory'
109
+
110
+ get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
111
+
112
+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
113
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
114
+ print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
115
+
116
+ if zrok_token:
117
+ get_ipython().system('zrok disable &> /dev/null')
118
+