omarelsayeed committed
Commit 5a79011
Parent(s): 08b71ab
Update app.py

app.py CHANGED
@@ -54,13 +54,11 @@ def parse_logits(logits: torch.Tensor, length: int) -> List[int]:
     return ret
 
 def get_orders(image_path, boxes):
-
-
-    print(boxes)
-    inputs = boxes2inputs(boxes)
+    b = scale_and_normalize_boxes(boxes)
+    inputs = boxes2inputs(b)
     inputs = {k: v.to(layout_model.device) for k, v in inputs.items()}  # Move inputs to model device
     logits = layout_model(**inputs).logits.cpu().squeeze(0)  # Perform inference and get logits
-    orders = parse_logits(logits, len(boxes))
+    orders = parse_logits(logits, len(b))
     return orders
 
 
@@ -250,6 +248,7 @@ def remove_overlapping_and_inside_boxes(boxes, classes):
 
 
 def full_predictions(IMAGE_PATH, conf_threshold, iou_threshold):
+    IMAGE_PATH = IMAGE_PATH.resize((1024,1024))
     bboxes, classes = detect_layout(IMAGE_PATH, conf_threshold, iou_threshold)
     bboxes, classes = remove_overlapping_and_inside_boxes(bboxes, classes)
     orders = get_orders(IMAGE_PATH, bboxes)