Spaces: Running on Zero

Update app.py
Browse files

app.py CHANGED
@@ -5,10 +5,21 @@ import PIL.Image
 from PIL import Image
 import random
 from diffusers import ControlNetModel, StableDiffusionXLPipeline, AutoencoderKL
-from diffusers import DDIMScheduler, EulerAncestralDiscreteScheduler
 import cv2
 import torch
 
+from diffusers import (
+    DDIMScheduler,
+    DPMSolverMultistepScheduler,
+    EulerDiscreteScheduler,
+    EulerAncestralDiscreteScheduler,
+    HeunDiscreteScheduler,
+    KDPM2DiscreteScheduler,
+    KDPM2AncestralDiscreteScheduler,
+    LMSDiscreteScheduler,
+    UniPCMultistepScheduler,
+)
+
 
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
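The enlarged import block covers nine interchangeable schedulers rather than the two imported before, and all of them share the same swap pattern: build a new scheduler from the current one's config and assign it to the pipeline. A minimal sketch of that pattern, assuming a pipeline loaded the usual way (the model id below is a placeholder for illustration, not taken from this diff):

import torch
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

# Placeholder checkpoint for illustration; app.py loads its own model elsewhere.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    torch_dtype=torch.float16,
)

# from_config copies the current scheduler's settings (beta schedule,
# timestep spacing, and so on), so only the sampling algorithm changes.
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)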
@@ -36,8 +47,31 @@ MAX_IMAGE_SIZE = 1216
 
 
 @spaces.GPU
-def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
+def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, sampler_name):
+
+    # Configure the sampler
+    if sampler_name == "DDIM":
+        pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "DPMSolverMultistep":
+        pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "Euler":
+        pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "EulerAncestral":
+        pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "Heun":
+        pipe.scheduler = HeunDiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "KDPM2":
+        pipe.scheduler = KDPM2DiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "KDPM2Ancestral":
+        pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "LMS":
+        pipe.scheduler = LMSDiscreteScheduler.from_config(pipe.scheduler.config)
+    elif sampler_name == "UniPC":
+        pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
+    else:
+        pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
+
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
 
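Every branch of the new if/elif ladder applies the same from_config call, so the chain could be collapsed into a name-to-class table. A sketch under the same imports; SAMPLERS and set_sampler are hypothetical names not in this diff, pipe is the app's module-level pipeline, and the fallback mirrors the diff's else branch:

SAMPLERS = {
    "DDIM": DDIMScheduler,
    "DPMSolverMultistep": DPMSolverMultistepScheduler,
    "Euler": EulerDiscreteScheduler,
    "EulerAncestral": EulerAncestralDiscreteScheduler,
    "Heun": HeunDiscreteScheduler,
    "KDPM2": KDPM2DiscreteScheduler,
    "KDPM2Ancestral": KDPM2AncestralDiscreteScheduler,
    "LMS": LMSDiscreteScheduler,
    "UniPC": UniPCMultistepScheduler,
}

def set_sampler(sampler_name):
    # Hypothetical helper: unknown names fall back to EulerAncestral,
    # matching the else branch in the diff above.
    cls = SAMPLERS.get(sampler_name, EulerAncestralDiscreteScheduler)
    pipe.scheduler = cls.from_config(pipe.scheduler.config)

Either way, note that the handler mutates the shared pipeline's scheduler on each request, which is only safe while requests are processed one at a time, as Gradio's default queue settings typically enforce.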
@@ -102,6 +136,12 @@ with gr.Blocks(css=css) as demo:
             value=0,
         )
 
+        sampler_name = gr.Dropdown(
+            label="Sampler",
+            choices=["DDIM", "DPMSolverMultistep", "Euler", "EulerAncestral", "Heun", "KDPM2", "KDPM2Ancestral", "LMS", "UniPC"],
+            value="EulerAncestral",
+        )
+
         randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
 
         with gr.Row():
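The Dropdown's choices list repeats the exact strings that infer compares against, so the two must be kept in sync by hand. If the table-driven sketch above were adopted, the UI could derive its choices from the same mapping (SAMPLERS is the hypothetical dict from that sketch):

sampler_name = gr.Dropdown(
    label="Sampler",
    choices=list(SAMPLERS),  # one source of truth for both UI and dispatch
    value="EulerAncestral",
)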
@@ -138,9 +178,11 @@ with gr.Blocks(css=css) as demo:
                 value=28,
             )
 
+
+
     run_button.click(#lambda x: None, inputs=None, outputs=result).then(
         fn=infer,
-        inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
+        inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, sampler_name],
         outputs=[result]
     )
 
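One detail worth flagging in the unchanged context line: everything after the # on the run_button.click( line is a comment, including the trailing .then(, so the call that actually executes is a plain event binding. Its effective shape, with the newly added sampler_name input, is sketched below:

run_button.click(
    fn=infer,
    inputs=[prompt, negative_prompt, seed, randomize_seed, width, height,
            guidance_scale, num_inference_steps, sampler_name],
    outputs=[result],
)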