muzairkhattak committed on
Commit
a0660ee
1 Parent(s): b063f78

minor edits

Browse files
Files changed (1) hide show
  1. app.py +7 -6
app.py CHANGED
@@ -83,14 +83,14 @@ pipes = {
83
  }
84
  # Define Gradio inputs and outputs
85
  inputs = [
86
- gr.inputs.Image(type="pil", label="Image"),
87
- gr.inputs.Textbox(label="Candidate Labels (comma-separated)"),
88
- gr.inputs.Radio(
89
  choices=["ViT/B-16", "ViT/L-14@336px-base-text"],
90
  label="Model",
91
  value="ViT/B-16",
92
  ),
93
- gr.inputs.Textbox(label="Prompt Template", placeholder="Optional prompt template as prefix",
94
  value=""),
95
  ]
96
  outputs = gr.Label(label="Predicted Scores")
@@ -129,7 +129,8 @@ iface = gr.Interface(shot,
129
  description="""<p>Demo for UniMed CLIP, a family of strong Medical Contrastive VLMs trained on UniMed-dataset. For more information about our project, refer to our paper and github repository. <br>
130
  Paper: <a href='https://arxiv.org/abs/2412.10372'>https://arxiv.org/abs/2412.10372</a> <br>
131
  Github: <a href='https://github.com/mbzuai-oryx/UniMed-CLIP'>https://github.com/mbzuai-oryx/UniMed-CLIP</a> <br><br>
132
- <b>[DEMO USAGE]</b> To begin with the demo, provide a picture (either upload manually, or select from the given examples) and class labels. Optionally you can also add template as an prefix to the class labels. <br> </p>""",
 
133
  title="Zero-shot Medical Image Classification with UniMed-CLIP")
134
 
135
- iface.launch()
 
83
  }
84
  # Define Gradio inputs and outputs
85
  inputs = [
86
+ gr.Image(type="pil", label="Image", width=300, height=300),
87
+ gr.Textbox(label="Candidate Labels (comma-separated)"),
88
+ gr.Radio(
89
  choices=["ViT/B-16", "ViT/L-14@336px-base-text"],
90
  label="Model",
91
  value="ViT/B-16",
92
  ),
93
+ gr.Textbox(label="Prompt Template", placeholder="Optional prompt template as prefix",
94
  value=""),
95
  ]
96
  outputs = gr.Label(label="Predicted Scores")
 
129
  description="""<p>Demo for UniMed CLIP, a family of strong Medical Contrastive VLMs trained on UniMed-dataset. For more information about our project, refer to our paper and github repository. <br>
130
  Paper: <a href='https://arxiv.org/abs/2412.10372'>https://arxiv.org/abs/2412.10372</a> <br>
131
  Github: <a href='https://github.com/mbzuai-oryx/UniMed-CLIP'>https://github.com/mbzuai-oryx/UniMed-CLIP</a> <br><br>
132
+ <b>[DEMO USAGE]</b> To begin with the demo, provide a picture (either upload manually, or select from the given examples) and class labels. Optionally you can also add template as an prefix to the class labels. <br> <b>[NOTE]</b> This demo is running on CPU and thus the response time might be a bit slower. Running it on a machine with a GPU will result in much faster predictions. </p>""",
133
+
134
  title="Zero-shot Medical Image Classification with UniMed-CLIP")
135
 
136
+ iface.launch(allowed_paths=["/home/user/app/docs/sample_images"])