import gradio as gr
title="Swin Transformer"
description="Gradio Demo for Swin Transformer: Hierarchical Vision Transformer using Shifted Windows. To use it, simply upload your image, or click one of the examples to load them. Read more at the links below."
article = "<p style='text-align: center'><a href='https://arxiv.org/abs/2103.14030' target='_blank'>Swin Transformer: Hierarchical Vision Transformer using Shifted Windows</a> | <a href='https://github.com/microsoft/Swin-Transformer' target='_blank'>Github Repo</a></p>"
io1 = gr.Interface.load("huggingface/microsoft/swin-large-patch4-window12-384-in22k")
#io2 = gr.Interface.load("huggingface/microsoft/swin-base-patch4-window7-224-in22k")
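# Note: gr.Interface.load("huggingface/<model_id>") wraps the model's hosted
# Hugging Face Inference API endpoint in a gradio Interface, so the loaded object
# can be called like a function (e.g. io1(image)) and returns the model's prediction.
# The base-model interface (io2) is left commented out; only the large model is loaded.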
def inference(image, model):
    # Route the request to the interface matching the selected model name.
    if model == "swin-large-patch4-window12-384-in22k":
        outtext = io1(image)
    else:
        # io2 is commented out above, so fall back to the large model for any other choice.
        outtext = io1(image)
    return outtext
examples = [['tiger.jpeg', 'swin-large-patch4-window12-384-in22k']]
gr.Interface(
    inference,
    [
        gr.inputs.Image(label="Input Image"),
        gr.inputs.Dropdown(choices=["swin-large-patch4-window12-384-in22k"], type="value", default="swin-large-patch4-window12-384-in22k", label="model"),
    ],
    gr.outputs.Label(label="Classification"),
    examples=examples,
    article=article,
    title=title,
    description=description,
).launch(enable_queue=True, cache_examples=True)
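# Usage note: this script targets the legacy gradio 2.x API (gr.inputs / gr.outputs,
# Interface.load). Assuming a matching install (e.g. `pip install "gradio==2.9.4"`),
# running `python app.py` serves the demo locally at http://localhost:7860 by default.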