prithivMLmods committed
Commit ba069c6 • Parent(s): 9efc887

Update app.py
app.py CHANGED
@@ -8,6 +8,8 @@ import spaces
 from diffusers import DiffusionPipeline
 import torch
 
+DESCRIPTIONx = """## SD-3.5 LARGE TURBO """
+
 device = "cuda" if torch.cuda.is_available() else "cpu"
 model_repo_id = "stabilityai/stable-diffusion-3.5-large-turbo"
 
@@ -58,7 +60,7 @@ grid_sizes = {
     "1x1": (1, 1)
 }
 
-@spaces.GPU
+@spaces.GPU(duration=60, enable_queue=True)
 def infer(
     prompt,
     negative_prompt="",
@@ -114,18 +116,16 @@ examples = [
     "A capybara wearing a suit holding a sign that reads Hello World",
 ]
 
-css =
-
-
-
+css = '''
+.gradio-container{max-width: 585px !important}
+h1{text-align:center}
+footer {
+    visibility: hidden
 }
-
+'''
 
-with gr.Blocks(css=css) as demo:
-
-    gr.Markdown(" # [Stable Diffusion 3.5 Large Turbo (8B)](https://huggingface.co/stabilityai/stable-diffusion-3.5-large-turbo)")
-    gr.Markdown("[Learn more](https://stability.ai/news/introducing-stable-diffusion-3-5) about the Stable Diffusion 3.5 series.")
-
+with gr.Blocks(css=css, theme="prithivMLmods/Minecraft-Theme") as demo:
+    gr.Markdown(DESCRIPTIONx)
     with gr.Row():
         prompt = gr.Text(
             label="Prompt",
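For context, a minimal sketch of how the pieces touched by this commit fit together: the DESCRIPTIONx heading, the parameterized @spaces.GPU(duration=60, enable_queue=True) decorator, the custom css block, and the themed gr.Blocks all come from the diff above. Everything else (pipeline loading, dtype, the trimmed infer() body, the sampler settings, and the Run-button wiring) is an assumption for illustration, not the Space's actual app.py.

# Hedged sketch of the updated layout, assuming the stock SD-3.5 Gradio demo structure.
# The real app.py defines more controls (seed, size, guidance, steps, grid size) and a fuller infer().
import torch
import spaces
import gradio as gr
from diffusers import DiffusionPipeline

DESCRIPTIONx = """## SD-3.5 LARGE TURBO """

device = "cuda" if torch.cuda.is_available() else "cpu"
model_repo_id = "stabilityai/stable-diffusion-3.5-large-turbo"

# Assumed loading step: bfloat16 on GPU is a common choice for this checkpoint, not taken from the commit.
pipe = DiffusionPipeline.from_pretrained(
    model_repo_id,
    torch_dtype=torch.bfloat16 if device == "cuda" else torch.float32,
).to(device)

@spaces.GPU(duration=60, enable_queue=True)  # decorator exactly as added in this commit
def infer(prompt, negative_prompt=""):
    # Trimmed stand-in for the Space's infer(); step count and guidance are
    # typical few-step Turbo settings, assumed for illustration.
    return pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        num_inference_steps=4,
        guidance_scale=0.0,
    ).images[0]

css = '''
.gradio-container{max-width: 585px !important}
h1{text-align:center}
footer {
    visibility: hidden
}
'''

with gr.Blocks(css=css, theme="prithivMLmods/Minecraft-Theme") as demo:
    gr.Markdown(DESCRIPTIONx)
    with gr.Row():
        prompt = gr.Text(label="Prompt")
        run = gr.Button("Run")  # assumed wiring; the Space's layout has more inputs
    result = gr.Image(label="Result")
    run.click(infer, inputs=[prompt], outputs=[result])

if __name__ == "__main__":
    demo.launch()

The net effect of the commit is cosmetic plus one runtime change: the Markdown header links are replaced by the short DESCRIPTIONx banner, the page is narrowed and the footer hidden via css, a Hub theme is applied to gr.Blocks, and the ZeroGPU decorator now requests an explicit 60-second GPU window per call instead of the default.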