Spaces:
Menyu
/
Running on Zero

Menyu committed on
Commit
8c287bb
1 Parent(s): e0f9b66

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -3
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
3
  import numpy as np
4
  import spaces
5
  import torch
6
- from diffusers import AutoPipelineForText2Image, EulerDiscreteScheduler
7
 
8
  if not torch.cuda.is_available():
9
  DESCRIPTION += "\n<p>你现在运行在CPU上 但是只支持GPU.</p>"
@@ -12,13 +12,17 @@ MAX_SEED = np.iinfo(np.int32).max
12
  MAX_IMAGE_SIZE = 4096
13
 
14
  if torch.cuda.is_available():
 
15
  pipe = AutoPipelineForText2Image.from_pretrained(
16
  "WhiteAiZ/noobaiXLNAIXL_epsilonPred075_diffusers",
 
17
  torch_dtype=torch.float16,
18
  use_safetensors=True,
19
- add_watermarker=False
 
20
  )
21
- pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
 
22
  pipe.to("cuda")
23
 
24
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
 
3
  import numpy as np
4
  import spaces
5
  import torch
6
+ from diffusers import AutoPipelineForText2Image, AutoencoderKL #,EulerDiscreteScheduler
7
 
8
  if not torch.cuda.is_available():
9
  DESCRIPTION += "\n<p>你现在运行在CPU上 但是只支持GPU.</p>"
 
12
  MAX_IMAGE_SIZE = 4096
13
 
14
  if torch.cuda.is_available():
15
+ vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
16
  pipe = AutoPipelineForText2Image.from_pretrained(
17
  "WhiteAiZ/noobaiXLNAIXL_epsilonPred075_diffusers",
18
+ vae=vae,
19
  torch_dtype=torch.float16,
20
  use_safetensors=True,
21
+ add_watermarker=False,
22
+ variant="fp16"
23
  )
24
+
25
+ # pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
26
  pipe.to("cuda")
27
 
28
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int: