el-el-san committed (verified)
Commit 0d6069a · Parent: 738703f

Update app.py

Files changed (1): app.py (+4 −4)
app.py CHANGED
@@ -43,9 +43,9 @@ MAX_IMAGE_SIZE = 1216
 def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
     # Check if the input image is a valid PIL Image and is not empty
     use_image = False
-    image = None
+    #image = None
 
-    if use_image and image is not None :
+    if use_image :# and image is not None :
         width, height = image['composite'].size
         ratio = np.sqrt(1024. * 1024. / (width * height))
         new_width, new_height = int(width * ratio), int(height * ratio)
@@ -113,7 +113,7 @@ with gr.Blocks(css=css) as demo:
         #image = gr.ImageEditor(type="pil", image_mode="L", crop_size=(512, 512))
         result = gr.Image(label="Result", show_label=False)
 
-        use_image = gr.Checkbox(label="Use image", value=True)
+        #use_image = gr.Checkbox(label="Use image", value=True)
 
         with gr.Accordion("Advanced Settings", open=False):
 
@@ -168,7 +168,7 @@ with gr.Blocks(css=css) as demo:
                     value=28,
                 )
 
-    run_button.click(lambda x: None, inputs=None, outputs=result).then(
+    run_button.click(#lambda x: None, inputs=None, outputs=result).then(
         fn=infer,
         #inputs=[use_image, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps,image],
         inputs=[prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
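
Net effect, as far as these hunks show: with use_image hard-coded to False and the image assignment commented out, the image-resizing branch in infer is never taken, and the run button now calls infer directly, because the "#" in the last hunk turns the clear-result lambda and the chained ".then(" into a comment, leaving a single click(...) call whose closing parenthesis is the one that previously closed .then(...). A minimal sketch of how that wiring reads after the change; the outputs argument and the closing parenthesis sit below the lines shown in the hunk, so outputs=[result] here is an assumption:

# Sketch only: the effective event wiring after this commit.
run_button.click(
    fn=infer,
    inputs=[prompt, negative_prompt, seed, randomize_seed, width, height,
            guidance_scale, num_inference_steps],
    outputs=[result],  # assumed; not visible in the diff context above
)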