import gradio as gr
from ultralytics import YOLO
from ultralytics.solutions import ai_gym
import cv2
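# NOTE (assumption): ai_gym.AIGym with set_args()/start_counting() is the workouts
# interface of older ultralytics releases; newer releases replaced it with a
# keyword-argument constructor applied directly to each frame. This script assumes
# a release that still ships set_args(), so the ultralytics version should be
# pinned (e.g. in the Space's requirements.txt).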

def process(video_path, pose_type):
    """Run pose estimation on the uploaded video and count exercise repetitions."""
    model = YOLO("yolov8n-pose.pt")  # lightweight pose model; weights download on first run
    cap = cv2.VideoCapture(video_path)
    assert cap.isOpened(), "Error reading video file"
    w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))
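    # Defensive addition (not in the original Space): some containers report FPS as 0,
    # which would make VideoWriter produce an unplayable file, so fall back to 30.
    fps = fps if fps > 0 else 30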

    video_writer = cv2.VideoWriter("output_video.mp4",
                                   cv2.VideoWriter_fourcc(*'mp4v'),
                                   fps,
                                   (w, h))

    gym_object = ai_gym.AIGym()  # initialize the AIGym workout-counting module
    gym_object.set_args(line_thickness=2,
                        view_img=False,  # no display window on a hosted Space
                        pose_type=pose_type,
                        kpts_to_check=[6, 8, 10])
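    # Note: [6, 8, 10] is the right shoulder-elbow-wrist chain in the COCO keypoint
    # layout, which suits pullups and pushups. A hypothetical refinement (not in the
    # original Space) would pick keypoints per exercise, e.g.
    #   KPTS_BY_POSE = {"pullup": [6, 8, 10], "pushup": [6, 8, 10], "abworkout": [6, 12, 14]}
    # and pass KPTS_BY_POSE[pose_type] to set_args().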

    frame_count = 0
    while cap.isOpened():
        success, im0 = cap.read()
        if not success:
            print("Video processing has been successfully completed.")
            break
        frame_count += 1
        results = model.track(im0, persist=True, verbose=True)  # persist=True keeps track IDs stable between frames
        im0 = gym_object.start_counting(im0, results, frame_count)
        video_writer.write(im0)

    cap.release()
    video_writer.release()
    cv2.destroyAllWindows()

    return "output_video.mp4"

title = "Workout Monitoring"
description = "This Space counts exercise repetitions (pullups, pushups, or ab workouts) in an uploaded video."
inputs = [
    gr.Video(label='Input Video'),  
    gr.Radio(["pullup", "pushup", "abworkout"], label="Pose Type")
]

outputs = gr.Video(label='Output Video')

# example_list = [['Examples/PULL-UPS.mp4'], ['Examples/PUSH-UPS.mp4']]
# Create the Gradio demo
demo = gr.Interface(fn=process,
                    inputs=inputs,
                    outputs=outputs,
                    title=title,
                    description=description
                    # examples=example_list,
                    # cache_examples=True
                   )

# Launch the demo!
demo.launch(show_api=True)
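
# Hedged usage sketch (assumptions: gradio_client is installed and the Space is
# published under the hypothetical id "user/workout-monitoring"). Because
# show_api=True, the endpoint can also be called programmatically:
#
#   from gradio_client import Client, handle_file
#   client = Client("user/workout-monitoring")   # hypothetical Space id
#   output_path = client.predict(
#       handle_file("Examples/PULL-UPS.mp4"),    # input video
#       "pullup",                                # pose type
#       api_name="/predict",
#   )
#   print(output_path)  # local path to the processed output video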