import gradio as gr
import torch
import json
import yolov5
# Example images and pretrained weights
torch.hub.download_url_to_file('https://github.com/ultralytics/yolov5/raw/master/data/images/zidane.jpg', 'zidane.jpg')
torch.hub.download_url_to_file('https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/image3.jpg', 'image3.jpg')
torch.hub.download_url_to_file('https://github.com/ultralytics/yolov5/releases/download/v5.0/yolov5s.pt', 'yolov5s.pt')
model_path = "yolov5x.pt"  # alternatives: "yolov5s.pt", "yolov5m.pt", "yolov5l.pt" (only yolov5s.pt is downloaded above)
image_size = 640           # trailing commas removed so these are scalars, not one-element tuples
conf_threshold = 0.25
iou_threshold = 0.45

model = yolov5.load(model_path, device="cpu")
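# The confidence and IoU thresholds above are defined but never handed to the model
# in the original script. The yolov5 package exposes the standard AutoShape attributes
# below, so wiring them in this way is assumed to be the intended behaviour:
model.conf = conf_threshold  # NMS confidence threshold
model.iou = iou_threshold    # NMS IoU threshold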
def yolov5_inference(
    image: gr.inputs.Image = None,
):
""" | |
YOLOv5 inference function | |
Args: | |
image: Input image | |
model_path: Path to the model | |
image_size: Image size | |
conf_threshold: Confidence threshold | |
iou_threshold: IOU threshold | |
Returns: | |
Rendered image | |
""" | |
    results = model([image], size=image_size)
    tensor = {
        "tensorflow": []
    }
    if results.pred is not None:
        for element in results.pred[0]:
            # Each prediction row is [x1, y1, x2, y2, confidence, class].
            obj = {}
            itemclass = round(element[5].item())
            obj["classe"] = itemclass
            obj["nome"] = results.names[itemclass]
            obj["score"] = element[4].item()
            x1, y1, x2, y2 = (element[i].item() for i in range(4))
            obj["x"] = x1
            obj["y"] = y1
            obj["w"] = x2 - x1  # boxes are xyxy; store width/height rather than the far corner
            obj["h"] = y2 - y1
            tensor["tensorflow"].append(obj)
    text = json.dumps(tensor)
    return text
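# Illustrative shape of the JSON produced above (values are made up):
#   {"tensorflow": [{"classe": 0, "nome": "person", "score": 0.87,
#                    "x": 100.0, "y": 50.0, "w": 210.0, "h": 380.0}]}
#
# Optional local sanity check (not part of the original Space); assumes zidane.jpg
# was downloaded by the calls at the top of the file:
#   from PIL import Image
#   print(yolov5_inference(Image.open("zidane.jpg")))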
inputs = [
    gr.inputs.Image(type="pil", label="Input Image"),
]
outputs = gr.outputs.Textbox(label="Detections (JSON)")
title = "YOLOv5"
description = "YOLOv5 is a family of object detection models pretrained on the COCO dataset. This demo uses the pip-packaged implementation of the original YOLOv5 model."
examples = [['zidane.jpg'], ['image3.jpg']]
demo_app = gr.Interface(
    fn=yolov5_inference,
    inputs=inputs,
    outputs=outputs,
    title=title,
    description=description,
    examples=examples,
    #cache_examples=True,
    #live=True,
    #theme='huggingface',
)
#demo_app.launch(debug=True, server_name="192.168.0.153", server_port=8080, enable_queue=True)
demo_app.launch(debug=True, enable_queue=True)
#demo_app.launch(debug=True, server_port=8083, enable_queue=True)