Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -11,14 +11,16 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 dtype = torch.float16
 
 repo = "dataautogpt3/OpenDalleV1.1"
+repo = "stabilityai/stable-diffusion-3-medium"
+
 # pipe = StableDiffusion3Pipeline.from_pretrained(repo, torch_dtype=torch.float16).to(device)
-pipeline = AutoPipelineForText2Image.from_pretrained(
+pipeline = AutoPipelineForText2Image.from_pretrained(repo, torch_dtype=torch.float16).to('cuda')
 
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1344
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=160)
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
 
     if randomize_seed:
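For context, below is a minimal sketch of how the lines touched by this commit typically sit in a ZeroGPU Space's app.py. Only the statements marked as coming from the diff mirror the commit; the imports, seed handling, and pipeline call inside infer are a standard diffusers text-to-image pattern assumed for illustration.

# Illustrative sketch only: lines marked "from the commit" mirror the diff above;
# everything else is common diffusers/Gradio boilerplate assumed for context.
import gradio as gr
import numpy as np
import spaces
import torch
from diffusers import AutoPipelineForText2Image

device = "cuda" if torch.cuda.is_available() else "cpu"  # shown in the hunk header
dtype = torch.float16                                     # from the commit

repo = "stabilityai/stable-diffusion-3-medium"            # from the commit

# Loaded once at import time; on a ZeroGPU Space the GPU is only attached while a
# @spaces.GPU-decorated function is running.
pipeline = AutoPipelineForText2Image.from_pretrained(repo, torch_dtype=torch.float16).to('cuda')  # from the commit

MAX_SEED = np.iinfo(np.int32).max                         # from the commit
MAX_IMAGE_SIZE = 1344                                     # from the commit

@spaces.GPU(duration=160)  # request up to 160 s of GPU time per call (from the commit)
def infer(prompt, negative_prompt, seed, randomize_seed, width, height,
          guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
    # Body below is assumed boilerplate, not part of the diff.
    if randomize_seed:
        seed = np.random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(int(seed))
    image = pipeline(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        generator=generator,
    ).images[0]
    return image, seed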