import gradio as gr
import cv2
import numpy as np
from PIL import Image

def segment_and_show(image):
    """Run instance segmentation on the input image and return the visualised result."""
    # OpenCV expects BGR channel order, so convert the incoming RGB image first.
    image = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
    # TODO: Implement segmentation logic here
    # Placeholder: convert back to RGB and return the image unchanged.
    return Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
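
# A minimal sketch (not part of the original app) of how the TODO above could be
# filled in, assuming torchvision's pretrained Mask R-CNN is available. The model
# choice, score threshold, and green overlay are illustrative assumptions only;
# this helper is never called by the demo below.
def example_segment_with_maskrcnn(pil_image):
    import torch
    import torchvision
    from torchvision.transforms.functional import to_tensor

    # Assumption: a COCO-pretrained instance segmentation model from torchvision.
    model = torchvision.models.detection.maskrcnn_resnet50_fpn(weights="DEFAULT")
    model.eval()

    with torch.no_grad():
        outputs = model([to_tensor(pil_image)])[0]

    # Blend each confident predicted mask (score > 0.5) in green onto the image.
    overlay = np.array(pil_image).copy()
    for mask, score in zip(outputs["masks"], outputs["scores"]):
        if score < 0.5:
            continue
        binary = mask[0].numpy() > 0.5
        overlay[binary] = (0.5 * overlay[binary] + 0.5 * np.array([0, 255, 0])).astype(np.uint8)
    return Image.fromarray(overlay)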

default_image = Image.open("demo.jpeg")

iface = gr.Interface(
    fn=segment_and_show,
    inputs=gr.Image(value=default_image),
    outputs=gr.Image(type="pil"),
    title="Urban Autonomy Instance Segmentation Demo",
    description="Upload an image or use the default to see the instance segmentation model in action.",
)

if __name__ == "__main__":
    iface.launch()