Spaces: gokaygokay (Paused)

Update app.py
Committed by gokaygokay • Commit 14ee7bd • Parent(s): c93bbf7
app.py CHANGED
@@ -191,7 +191,12 @@ def gradio_process_image(input_image, resolution, num_inference_steps, strength,
     print("Running inference...")
     result = lazy_pipe(**options).images[0]
     print("Image processing completed successfully")
-    return result
+
+    # Convert input_image and result to numpy arrays
+    input_array = np.array(input_image)
+    result_array = np.array(result)
+
+    return [input_array, result_array]
 
 title = """<h1 align="center">Image Upscaler with Tile Controlnet</h1>
 <p align="center">The main ideas come from</p>
@@ -208,7 +213,7 @@ with gr.Blocks() as demo:
             input_image = gr.Image(type="pil", label="Input Image")
             run_button = gr.Button("Enhance Image")
         with gr.Column():
-            output_image = gr.Image(...)
+            output_slider = ImageSlider(label="Before / After", type="numpy")
     with gr.Accordion("Advanced Options", open=False):
         resolution = gr.Slider(minimum=256, maximum=2048, value=512, step=256, label="Resolution")
         num_inference_steps = gr.Slider(minimum=1, maximum=50, value=20, step=1, label="Number of Inference Steps")
@@ -216,6 +221,10 @@ with gr.Blocks() as demo:
         hdr = gr.Slider(minimum=0, maximum=1, value=0, step=0.1, label="HDR Effect")
         guidance_scale = gr.Slider(minimum=0, maximum=20, value=3, step=0.5, label="Guidance Scale")
 
+    run_button.click(fn=gradio_process_image,
+                     inputs=[input_image, resolution, num_inference_steps, strength, hdr, guidance_scale],
+                     outputs=output_slider)
+
     # Add examples with all required inputs
     gr.Examples(
         examples=[
@@ -224,13 +233,9 @@ with gr.Blocks() as demo:
             ["image3.png", 512, 20, 0.4, 0, 3],
         ],
         inputs=[input_image, resolution, num_inference_steps, strength, hdr, guidance_scale],
-        outputs=output_image,
+        outputs=output_slider,
         fn=gradio_process_image,
         cache_examples=True,
     )
 
-    run_button.click(fn=gradio_process_image,
-                     inputs=[input_image, resolution, num_inference_steps, strength, hdr, guidance_scale],
-                     outputs=output_image)
-
 demo.launch(share=True)
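For reference, a minimal, self-contained sketch of the before/after wiring this commit introduces. It assumes ImageSlider comes from the gradio_imageslider custom component (the import is not visible in this diff) and substitutes a hypothetical upscale_stub() for the real lazy_pipe(**options).images[0] call; the pipeline setup and the Space's extra sliders are omitted.

import numpy as np
import gradio as gr
from gradio_imageslider import ImageSlider  # assumed source of ImageSlider
from PIL import Image

def upscale_stub(img: Image.Image) -> Image.Image:
    # Hypothetical stand-in for the real ControlNet tile upscaling pipeline.
    return img.resize((img.width * 2, img.height * 2))

def process(img: Image.Image):
    result = upscale_stub(img)
    # The slider takes a (before, after) pair; with type="numpy" both images
    # are handed over as numpy arrays, mirroring the commit's return value.
    return [np.array(img), np.array(result)]

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(type="pil", label="Input Image")
            run_button = gr.Button("Enhance Image")
        with gr.Column():
            output_slider = ImageSlider(label="Before / After", type="numpy")
    run_button.click(fn=process, inputs=input_image, outputs=output_slider)

demo.launch()

Returning the pair as numpy arrays lines up with the slider's type="numpy" setting, which is why the commit converts both the PIL input and the pipeline result with np.array() before returning them.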