Update app.py
app.py CHANGED
@@ -26,7 +26,7 @@ if torch.cuda.is_available():
         add_watermarker=False,
     )
     # pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
-    pipe.tokenizer.model_max_length = 512
+    # pipe.tokenizer.model_max_length = 512
     pipe.to("cuda")
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
@@ -77,10 +77,10 @@ def infer(
         negative_pooled_prompt_embeds = None
 
     image = pipe(
-        prompt_embeds=prompt_embeds,
-        negative_prompt_embeds=prompt_neg_embeds,
-        pooled_prompt_embeds=pooled_prompt_embeds,
-        negative_pooled_prompt_embeds=negative_pooled_prompt_embeds,
+        prompt=prompt_embeds,
+        negative_prompt=prompt_neg_embeds,
+        #pooled_prompt_embeds=pooled_prompt_embeds,
+        #negative_pooled_prompt_embeds=negative_pooled_prompt_embeds,
         width=width,
         height=height,
         guidance_scale=guidance_scale,