import gradio as gr
import torch
from torch import nn
from torch.nn import functional as F
import torchvision
from torchvision import transforms

# Load the trained classifier saved as a TorchScript archive
model = torch.jit.load('best.pt')
model.eval()

# Preprocessing: resize to the model's 640x640 input and normalize
# with the standard ImageNet mean and std
data_transform1 = transforms.Compose([
    transforms.Resize((640, 640)),
    transforms.ToTensor(),
    transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
])
title = "Fashion Items Classification"
examples = [
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/pants_33.jpg?raw=true'],
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/pants_30.jpeg?raw=true'],
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/bag_01.jpg?raw=true'],
]
classes = ['Bags', 'Dress', 'Pants', 'Shoes', 'Skirt']
def predict(img):
    # Preprocess the PIL image and add a batch dimension
    imag = data_transform1(img)
    inp = imag.unsqueeze(0)
    # Run inference without tracking gradients
    with torch.no_grad():
        outputs = model(inp)
    # Turn the logits for the single image into class probabilities
    pred = F.softmax(outputs[0], dim=0).cpu().numpy()
    confidences = {classes[i]: float(pred[i]) for i in range(len(classes))}
    return confidences
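
# Sketch (assumption): 'best.pt' above is expected to be a TorchScript
# archive. A file like it could be exported from a trained model as shown
# below; resnet18 is only a hypothetical stand-in for whatever architecture
# was actually trained, and this helper is never called by the app itself.
def export_torchscript_sketch(trained_model=None, path='best_sketch.pt'):
    if trained_model is None:
        trained_model = torchvision.models.resnet18(num_classes=len(classes))
    trained_model.eval()
    # Trace with a dummy input matching the preprocessing pipeline above
    example_input = torch.randn(1, 3, 640, 640)
    traced = torch.jit.trace(trained_model, example_input)
    traced.save(path)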
# gr.Image replaces the gr.inputs.Image API removed in newer Gradio releases
gr.Interface(
    predict,
    inputs=gr.Image(type='pil'),
    outputs='label',
    title=title,
    examples=examples,
).launch(debug=True)
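
# Usage note: run with `python app.py`; launch(debug=True) keeps the server
# in the foreground and prints full stack traces, which helps when debugging
# a Space that shows a runtime error.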