Build error
Update app.py
app.py CHANGED
@@ -59,7 +59,6 @@ def inference(device, model_type, points_per_side, pred_iou_thresh, stability_sc
     frames_num = cap.get(cv2.CAP_PROP_FRAME_COUNT)
     W, H = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
     fps = int(cap.get(cv2.CAP_PROP_FPS))
-    print(fps)
     out = cv2.VideoWriter("output.mp4", cv2.VideoWriter_fourcc('x', '2', '6', '4'), fps, (W, H), isColor=True)
     for _ in progress.tqdm(range(int(frames_num)), desc='Processing video ({} frames, size {}x{})'.format(int(frames_num), W, H)):
         ret, frame = cap.read() # read a frame
@@ -83,7 +82,7 @@ with gr.Blocks() as demo:
     # select model
     model_type = gr.Dropdown(["vit_b", "vit_l", "vit_h"], value='vit_b', label="Select Model")
     # select device
-    device = gr.Dropdown(["cpu"
+    device = gr.Dropdown(["cpu"], value='cpu', label="Select Device")

     # Parameters
     with gr.Accordion(label='Parameters', open=False):
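For context, the first hunk only removes a leftover debug print(fps) from the frame-processing loop inside inference(). Below is a minimal sketch of that loop, assuming OpenCV (cv2) is installed; the process_video name, the input_path argument, and the elided per-frame masking step are placeholders for illustration, not the Space's actual code.

import cv2

def process_video(input_path, output_path="output.mp4"):
    # Open the input video and read its basic properties, mirroring the
    # cap.get(...) calls in the hunk above.
    cap = cv2.VideoCapture(input_path)
    frames_num = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    W = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    H = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = int(cap.get(cv2.CAP_PROP_FPS))

    # Re-encode at the source resolution and frame rate with the x264 fourcc.
    out = cv2.VideoWriter(output_path, cv2.VideoWriter_fourcc('x', '2', '6', '4'),
                          fps, (W, H), isColor=True)

    for _ in range(frames_num):
        ret, frame = cap.read()  # read one frame
        if not ret:  # CAP_PROP_FRAME_COUNT is only an estimate for some containers
            break
        # ... per-frame mask generation and overlay would go here ...
        out.write(frame)

    cap.release()
    out.release()

Guarding on ret matters because the reported frame count can overshoot the number of decodable frames, in which case cap.read() returns (False, None).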
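The second hunk is the actual fix: the previous revision left the device dropdown truncated mid-call (device = gr.Dropdown(["cpu"), a syntax error that would explain the Space's Build error status. Below is a hedged sketch of how the repaired selectors sit inside the Blocks layout; the Accordion widgets and their default values are assumptions based only on the parameter names in the hunk header (points_per_side, pred_iou_thresh, and the truncated stability_sc..., presumably stability_score_thresh), not the Space's actual code.

import gradio as gr

with gr.Blocks() as demo:
    # select model
    model_type = gr.Dropdown(["vit_b", "vit_l", "vit_h"], value='vit_b', label="Select Model")
    # select device (the line repaired by this commit)
    device = gr.Dropdown(["cpu"], value='cpu', label="Select Device")

    # Parameters (illustrative controls; the real app may use different widgets or defaults)
    with gr.Accordion(label='Parameters', open=False):
        points_per_side = gr.Number(value=32, label='points_per_side', precision=0)
        pred_iou_thresh = gr.Slider(0.0, 1.0, value=0.88, label='pred_iou_thresh')
        stability_score_thresh = gr.Slider(0.0, 1.0, value=0.95, label='stability_score_thresh')

if __name__ == "__main__":
    demo.launch()

Offering only "cpu" in the device dropdown matches basic Spaces hardware, where a CUDA device is typically not available.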