import gradio as gr
from transformers import pipeline
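# Eagerly load a zero-shot image-classification pipeline for each of the
# four OFA-Sys Chinese CLIP checkpoints, keyed by model name for the UI.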
pipes = {
    "chinese-clip-vit-base-patch16": pipeline("zero-shot-image-classification", model="OFA-Sys/chinese-clip-vit-base-patch16"),
    "chinese-clip-vit-large-patch14": pipeline("zero-shot-image-classification", model="OFA-Sys/chinese-clip-vit-large-patch14"),
    "chinese-clip-vit-large-patch14-336px": pipeline("zero-shot-image-classification", model="OFA-Sys/chinese-clip-vit-large-patch14-336px"),
    "chinese-clip-vit-huge-patch14": pipeline("zero-shot-image-classification", model="OFA-Sys/chinese-clip-vit-huge-patch14"),
}
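# Interface inputs: the image to classify, a comma-separated string of
# candidate labels, and a radio selector for the Chinese CLIP checkpoint.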
inputs = [
    gr.Image(type="pil", label="Image"),
    gr.Textbox(label="Labels (comma-separated)"),
    gr.Radio(
        choices=[
            "chinese-clip-vit-base-patch16",
            "chinese-clip-vit-large-patch14",
            "chinese-clip-vit-large-patch14-336px",
            "chinese-clip-vit-huge-patch14",
        ],
        value="chinese-clip-vit-base-patch16",
        label="Model",
    ),
]
images="festival.jpg"
def shot(image, labels_text, model_name):
    # Split the comma-separated label string and strip whitespace around each label.
    labels = [label.strip() for label in labels_text.split(",")]
    # The hypothesis template is Chinese ("A picture of {}.") to match the Chinese CLIP models.
    res = pipes[model_name](images=image,
                            candidate_labels=labels,
                            hypothesis_template="一张{}的图片。")
    return {dic["label"]: dic["score"] for dic in res}
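# Wire the app together; the "label" output renders the score dict as ranked bars.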
iface = gr.Interface(shot,
                     inputs,
                     "label",
                     examples=[["festival.jpg", "灯笼, 鞭炮, 对联",  # lanterns, firecrackers, couplets
                                "chinese-clip-vit-base-patch16"],
                               ["cat-dog-music.png", "音乐表演, 体育运动",  # musical performance, sports
                                "chinese-clip-vit-base-patch16"],
                               ["football-match.jpg", "梅西, C罗, 马奎尔",  # Messi, Ronaldo, Maguire
                                "chinese-clip-vit-base-patch16"]],
                     description="Upload an image and enter candidate labels in Chinese, separated by commas.",
                     title="Zero-shot Image Classification")
iface.launch()