jbilcke-hf committed
Commit 82378b1 · 1 Parent(s): 0810225

Update app.py

Files changed (1)
  1. app.py +0 -14
app.py CHANGED

@@ -16,8 +16,6 @@ if not torch.cuda.is_available():
     DESCRIPTION += '\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>'
 
 MAX_SEED = np.iinfo(np.int32).max
-CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv(
-    'CACHE_EXAMPLES') == '1'
 MAX_IMAGE_SIZE = int(os.getenv('MAX_IMAGE_SIZE', '1024'))
 USE_TORCH_COMPILE = os.getenv('USE_TORCH_COMPILE') == '1'
 ENABLE_CPU_OFFLOAD = os.getenv('ENABLE_CPU_OFFLOAD') == '1'
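
Aside: every constant in this hunk follows the same configuration pattern, reading an environment variable with a fallback so the Space can be tuned without code changes. The deleted CACHE_EXAMPLES flag additionally required CUDA, because caching examples means pre-running the model at startup. A minimal sketch of that pattern, with the removed flag shown for comparison:

import os

import torch

# Boolean flags are opt-in: unset, or any value other than '1', means off.
USE_TORCH_COMPILE = os.getenv('USE_TORCH_COMPILE') == '1'
ENABLE_CPU_OFFLOAD = os.getenv('ENABLE_CPU_OFFLOAD') == '1'

# Numeric knobs supply a string default, then convert.
MAX_IMAGE_SIZE = int(os.getenv('MAX_IMAGE_SIZE', '1024'))

# The removed flag combined the env-var opt-in with a hardware check, so
# example caching could never be enabled on a CPU-only Space.
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv('CACHE_EXAMPLES') == '1'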
@@ -118,12 +116,6 @@ def generate(prompt: str,
                  generator=generator).images[0]
     return image
 
-
-examples = [
-    'Astronaut in a jungle, cold color palette, muted colors, detailed, 8k',
-    'An astronaut riding a green horse',
-]
-
 with gr.Blocks(css='style.css') as demo:
     gr.Markdown(DESCRIPTION)
     with gr.Box():

@@ -217,12 +209,6 @@ with gr.Blocks(css='style.css') as demo:
             step=1,
             value=50)
 
-    gr.Examples(examples=examples,
-                inputs=prompt,
-                outputs=result,
-                fn=generate,
-                cache_examples=CACHE_EXAMPLES)
-
     use_negative_prompt.change(
         fn=lambda x: gr.update(visible=x),
         inputs=use_negative_prompt,
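
The two later hunks delete the consumer of that flag: the examples list and the gr.Examples component that wired it to generate. A minimal, runnable sketch of the removed wiring, with a hypothetical fake_generate standing in for the Space's diffusion pipeline:

import os

import gradio as gr
import torch

CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv('CACHE_EXAMPLES') == '1'

examples = [
    'Astronaut in a jungle, cold color palette, muted colors, detailed, 8k',
    'An astronaut riding a green horse',
]


def fake_generate(prompt: str) -> str:
    # Stand-in for the Space's generate(), which returns pipe(...).images[0].
    return f'(image for: {prompt})'


with gr.Blocks() as demo:
    prompt = gr.Textbox(label='Prompt')
    result = gr.Textbox(label='Result')
    # With cache_examples=True, Gradio runs fn on every example at startup
    # and serves the stored outputs on click; with False, clicking an
    # example only fills the input box.
    gr.Examples(examples=examples,
                inputs=prompt,
                outputs=result,
                fn=fake_generate,
                cache_examples=CACHE_EXAMPLES)

if __name__ == '__main__':
    demo.launch()

Dropping the component entirely, rather than just forcing cache_examples=False, removes both the startup cost and the UI element; the Space keeps only the free-form prompt box.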
 