File size: 1,174 Bytes
107040a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import random
import cv2
import numpy as np
import torch
from PIL import Image


def crop_4_patches(image):
    """Split *image* into its four equal quadrants.

    Assumes a square image (only ``size[0]`` is consulted); with an odd
    side length the rightmost/bottom pixel row is dropped.

    Returns a 4-tuple of PIL images in the order:
    top-left, bottom-left, top-right, bottom-right.
    """
    half = image.size[0] // 2
    # PIL crop boxes are (left, upper, right, lower).
    boxes = (
        (0, 0, half, half),                    # top-left
        (0, half, half, 2 * half),             # bottom-left
        (half, 0, 2 * half, half),             # top-right
        (half, half, 2 * half, 2 * half),      # bottom-right
    )
    return tuple(image.crop(box) for box in boxes)


def pre_processing(image, transform):
    """Decompose *image* into a three-level hierarchy of transformed patches.

    The image is split into quadrants recursively. At every level the two
    diagonal quadrants (indices 0 and 3: top-left and bottom-right) are
    kept and run through *transform*, while the two anti-diagonal quadrants
    (indices 1 and 2) are subdivided one level deeper.

    Returns a 3-tuple of stacked tensors:
    (high_level [8 patches], middle_level [16], low_level [32]).
    """
    high, middle, low = [], [], []
    for quad in crop_4_patches(image):
        sub8 = crop_4_patches(quad)
        # Keep the diagonal pair, descend into the anti-diagonal pair.
        high.extend((transform(sub8[0]), transform(sub8[3])))
        for patch8 in (sub8[1], sub8[2]):
            sub16 = crop_4_patches(patch8)
            middle.extend((transform(sub16[0]), transform(sub16[3])))
            for patch16 in (sub16[1], sub16[2]):
                sub32 = crop_4_patches(patch16)
                low.extend((transform(sub32[0]), transform(sub32[3])))
    return torch.stack(high), torch.stack(middle), torch.stack(low)