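# Gradio demo: classifies an uploaded fashion item image into one of five
# classes (Bags, Dress, Pants, Shoes, Skirt) using a TorchScript model (best.pt).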
import gradio as gr
import torch
from torch.nn import functional as F
from torchvision import transforms
# Load the exported TorchScript model and make sure it runs in inference mode.
model = torch.jit.load('best.pt')
model.eval()
# Preprocessing: resize to the model's input size and normalize with the
# standard ImageNet statistics.
data_transform1 = transforms.Compose([
    transforms.Resize((640, 640)),
    transforms.ToTensor(),
    transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
])
title = " Fashion Items Classification"
examples = [
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/pants_33.jpg?raw=true'],
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/pants_30.jpeg?raw=true'],
    ['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/bag_01.jpg?raw=true'],
]
classes = ['Bags', 'Dress', 'Pants', 'Shoes', 'Skirt']
def predict(img):
    # Preprocess the PIL image and add a batch dimension.
    inp = data_transform1(img).unsqueeze(0)
    # Run the model without tracking gradients and convert the logits to
    # per-class probabilities.
    with torch.no_grad():
        outputs = model(inp)
    pred = F.softmax(outputs[0], dim=0).cpu().numpy()
    # Map each class name to its predicted confidence for the Label output.
    return {classes[i]: float(pred[i]) for i in range(len(classes))}
# Build the Gradio interface: an image input, a class-confidence label output.
gr.Interface(
    fn=predict,
    inputs=gr.Image(type='pil'),
    outputs='label',
    title=title,
    examples=examples,
).launch(debug=True)
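# To try the demo locally, run this script directly with gradio, torch and
# torchvision installed; best.pt must be present in the working directory.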