Update app.py
app.py (CHANGED)
@@ -11,7 +11,7 @@ import PIL.Image
 import torch
 from diffusers import AutoencoderKL, StableDiffusionXLPipeline
 
-DESCRIPTION = "# Segmind Stable Diffusion"
+DESCRIPTION = "# Segmind Stable Diffusion: SSD-1B"
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
 
@@ -49,11 +49,13 @@ if torch.cuda.is_available():
     pipe.to(device)
     if ENABLE_REFINER:
         refiner.to(device)
+    print("Loaded on Device!")
 
     if USE_TORCH_COMPILE:
         pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
         if ENABLE_REFINER:
             refiner.unet = torch.compile(refiner.unet, mode="reduce-overhead", fullgraph=True)
+    print("Model Compiled!")
 
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
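For readers reconstructing the context, below is a minimal sketch of how the touched block in app.py plausibly reads after this commit. The names pipe, refiner, device, ENABLE_REFINER, USE_TORCH_COMPILE and the two print calls come from the diff above; the checkpoint ids, environment-variable flags, refiner loading, and exact indentation of the added prints are assumptions for illustration, not the Space's verified code.

import os

import torch
from diffusers import AutoencoderKL, DiffusionPipeline, StableDiffusionXLPipeline

# Feature flags: assumed to be driven by environment variables, as is common in Spaces demos.
ENABLE_REFINER = os.getenv("ENABLE_REFINER", "0") == "1"
USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"

if torch.cuda.is_available():
    device = torch.device("cuda")

    # Checkpoint ids are assumptions; the Space title suggests segmind/SSD-1B.
    vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
    pipe = StableDiffusionXLPipeline.from_pretrained(
        "segmind/SSD-1B", vae=vae, torch_dtype=torch.float16, use_safetensors=True
    )
    refiner = None
    if ENABLE_REFINER:
        # Assumed SDXL refiner checkpoint; DiffusionPipeline resolves the right pipeline class.
        refiner = DiffusionPipeline.from_pretrained(
            "stabilityai/stable-diffusion-xl-refiner-1.0", vae=vae, torch_dtype=torch.float16
        )

    pipe.to(device)
    if ENABLE_REFINER:
        refiner.to(device)
    print("Loaded on Device!")  # added by this commit

    if USE_TORCH_COMPILE:
        # reduce-overhead mode trades compile time for lower per-call latency.
        pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
        if ENABLE_REFINER:
            refiner.unet = torch.compile(refiner.unet, mode="reduce-overhead", fullgraph=True)
    print("Model Compiled!")  # added by this commit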