Update app.py
app.py
CHANGED
@@ -13,10 +13,18 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
 
+#pipe = StableDiffusionXLPipeline.from_pretrained(
+#    "yodayo-ai/holodayo-xl-2.1",
+#    vae=vae,
+#    torch_dtype=torch.float16,
+#)
 pipe = StableDiffusionXLPipeline.from_pretrained(
-    "yodayo-ai/holodayo-xl-2.1",
-    vae=vae,
-    torch_dtype=torch.float16,
+    "yodayo-ai/clandestine-xl-1.0",
+    torch_dtype=torch.float16,
+    use_safetensors=True,
+    custom_pipeline="lpw_stable_diffusion_xl",
+    add_watermarker=False,
+    variant="fp16"
 )
 
 pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
@@ -59,9 +67,10 @@ with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown("""
        # Text-to-Image Demo
-        using [Holodayo XL 2.1](https://huggingface.co/yodayo-ai/holodayo-xl-2.1)
+        using [clandestine XL 1.0](https://huggingface.co/yodayo-ai/clandestine-xl-1.0)
        """)
-
+        #yodayo-ai/clandestine-xl-1.0
+        #yodayo-ai/holodayo-xl-2.1
        with gr.Row():
            prompt = gr.Text(
                label="Prompt",