import torch
import numpy as np
from itertools import product
from math import ceil


class PriorBox(object):
    def __init__(self, cfg, image_size=None, phase='train'):
        super(PriorBox, self).__init__()
        self.msizes = cfg['msizes']
        self.steps = cfg['steps']
        self.clip = cfg['clip']
        self.image_size = image_size
        # Feature-map size (height, width) produced by each stride in `steps`.
        self.feature_maps = [[ceil(self.image_size[0] / step), ceil(self.image_size[1] / step)]
                             for step in self.steps]
        self.name = "s"

    def forward(self):
        anchors = []
        for k, f in enumerate(self.feature_maps):
            msizes = self.msizes[k]
            # Visit every cell (i, j) of the k-th feature map.
            for i, j in product(range(f[0]), range(f[1])):
                for msize in msizes:
                    # Anchor width/height, normalized by image width/height.
                    s_kx = msize / self.image_size[1]
                    s_ky = msize / self.image_size[0]
                    # Anchor center, normalized to [0, 1].
                    dense_cx = [x * self.steps[k] / self.image_size[1] for x in [j + 0.5]]
                    dense_cy = [y * self.steps[k] / self.image_size[0] for y in [i + 0.5]]
                    for cy, cx in product(dense_cy, dense_cx):
                        anchors += [cx, cy, s_kx, s_ky]
        # back to torch land
        output = torch.Tensor(anchors).view(-1, 4)
        if self.clip:
            output.clamp_(max=1, min=0)
        return output
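

# Minimal usage sketch (assumption: the cfg values below are illustrative,
# modeled on the keys the class reads above; they are not taken from any
# particular config file).
if __name__ == "__main__":
    cfg = {
        'msizes': [[16, 32], [64, 128], [256, 512]],  # anchor sizes per feature map
        'steps': [8, 16, 32],                         # stride of each feature map
        'clip': False,
    }
    priorbox = PriorBox(cfg, image_size=(640, 640))
    priors = priorbox.forward()
    # Shape is (num_anchors, 4), each row (cx, cy, w, h) normalized to [0, 1].
    print(priors.shape)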