Spaces: nebula / Running on Zero

Menyu committed
Commit: fc967fc
Parent: dfa6244

Update app.py

Files changed (1)
  1. app.py +4 -7
app.py CHANGED
@@ -11,7 +11,7 @@ import torch
 from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
 
 if not torch.cuda.is_available():
-    DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
+    DESCRIPTION += "\n<p>You are now running on CPU, but only GPU is supported.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
 CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
@@ -54,7 +54,7 @@ def generate(
     seed = int(randomize_seed_fn(seed, randomize_seed))
     generator = torch.Generator().manual_seed(seed)
 
-    options = {
-        "prompt": prompt,
-        "negative_prompt": negative_prompt,
-        "width": width,
+    image = pipe(
+        prompt=prompt,
+        negative_prompt=negative_prompt,
+        width=width,
@@ -64,12 +64,10 @@
-        "generator": generator,
-        "use_resolution_binning": use_resolution_binning,
-        "output_type": "pil",
-    }
-
-    images = pipe(**options).images[0]
-
-    return images, seed
+        generator=generator,
+        use_resolution_binning=use_resolution_binning,
+        output_type="pil",
+    ).images[0]
+
+    return image, seed
 
 
 examples = [
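
For context, a minimal sketch of what the updated call site resolves to as runnable Python. The full generate signature, the arguments elided between the two hunks (e.g. height), the checkpoint name, and the randomize_seed_fn helper are assumptions, not part of the commit; only the lines shown in the diff above come from app.py.

import random

import numpy as np
import torch
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

MAX_SEED = np.iinfo(np.int32).max

# Hypothetical checkpoint; the Space loads its own model elsewhere in app.py.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    torch_dtype=torch.float16,
)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)

def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
    # Assumed helper matching the call in the diff: draw a fresh seed
    # when randomization is requested, otherwise keep the one given.
    return random.randint(0, MAX_SEED) if randomize_seed else seed

def generate(
    prompt: str,
    negative_prompt: str = "",
    width: int = 1024,
    height: int = 1024,  # assumed; hidden between the two diff hunks
    seed: int = 0,
    randomize_seed: bool = True,
    use_resolution_binning: bool = True,
):
    seed = int(randomize_seed_fn(seed, randomize_seed))
    generator = torch.Generator().manual_seed(seed)

    # The commit replaces the intermediate `options` dict (previously
    # called as pipe(**options)) with a direct keyword-argument call.
    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        generator=generator,
        use_resolution_binning=use_resolution_binning,  # passed through as in the diff
        output_type="pil",
    ).images[0]

    return image, seed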