Fix concatenation in center_size, docstring of truths' shape in match, infinity in encoded target boxes, zero object detection in nms; credit to amdegroot#116, amdegroot#144, amdegroot#194
d-li14 committed Oct 15, 2018
1 parent e128a7b commit 26743e6
Showing 1 changed file with 5 additions and 5 deletions: layers/box_utils.py
@@ -22,8 +22,8 @@ def center_size(boxes):
     Return:
         boxes: (tensor) Converted xmin, ymin, xmax, ymax form of boxes.
     """
-    return torch.cat((boxes[:, 2:] + boxes[:, :2])/2,  # cx, cy
-                     boxes[:, 2:] - boxes[:, :2], 1)  # w, h
+    return torch.cat(((boxes[:, 2:] + boxes[:, :2])/2,  # cx, cy
+                      boxes[:, 2:] - boxes[:, :2]), 1)  # w, h
 
 
 def intersect(box_a, box_b):
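
Why the first change works: torch.cat takes a sequence of tensors plus a dim argument, but the old call passed the second tensor and the dim as extra positional arguments, which raises a TypeError at runtime. A minimal standalone sketch of the fixed function (the example values are illustrative, not from the commit):

import torch

def center_size(boxes):
    # corner form (xmin, ymin, xmax, ymax) -> center-size form (cx, cy, w, h)
    return torch.cat(((boxes[:, 2:] + boxes[:, :2]) / 2,   # cx, cy
                      boxes[:, 2:] - boxes[:, :2]), 1)     # w, h

boxes = torch.tensor([[10., 20., 30., 60.]])
print(center_size(boxes))  # tensor([[20., 40., 20., 40.]])
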
@@ -74,7 +74,7 @@ def match(threshold, truths, priors, variances, labels, loc_t, conf_t, idx):
     corresponding to both confidence and location preds.
     Args:
         threshold: (float) The overlap threshold used when matching boxes.
-        truths: (tensor) Ground truth boxes, Shape: [num_obj, num_priors].
+        truths: (tensor) Ground truth boxes, Shape: [num_obj, 4].
         priors: (tensor) Prior boxes from priorbox layers, Shape: [n_priors,4].
         variances: (tensor) Variances corresponding to each prior coord,
             Shape: [num_priors, 4].
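
The docstring change is documentation only: truths stores one corner-form box per ground-truth object, so its second dimension is 4 and is unrelated to the prior count. A hedged sketch of the shapes match consumes (sizes are illustrative; 8732 is the usual SSD300 prior count):

import torch

num_obj, num_priors = 3, 8732
truths = torch.rand(num_obj, 4)             # one (xmin, ymin, xmax, ymax) row per object
priors = torch.rand(num_priors, 4)          # prior boxes in center-size form
labels = torch.randint(1, 21, (num_obj,))   # one class index per object
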
@@ -131,7 +131,7 @@ def encode(matched, priors, variances):
     g_cxcy /= (variances[0] * priors[:, 2:])
     # match wh / prior wh
     g_wh = (matched[:, 2:] - matched[:, :2]) / priors[:, 2:]
-    g_wh = torch.log(g_wh) / variances[1]
+    g_wh = torch.log(g_wh + 1e-10) / variances[1]
     # return target for smooth_l1_loss
     return torch.cat([g_cxcy, g_wh], 1)  # [num_priors,4]
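
Why the added epsilon: a degenerate ground-truth box with zero width or height makes the width/height ratio zero, torch.log(0) is -inf, and the localization loss then turns into NaN once backpropagated. A minimal sketch of the failure and the guard:

import torch

g_wh = torch.tensor([0.0, 0.5, 2.0])
print(torch.log(g_wh))           # tensor([   -inf, -0.6931,  0.6931])
print(torch.log(g_wh + 1e-10))   # tensor([-23.0259, -0.6931,  0.6931])
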

@@ -186,7 +186,7 @@ def nms(boxes, scores, overlap=0.5, top_k=200):
 
     keep = scores.new(scores.size(0)).zero_().long()
     if boxes.numel() == 0:
-        return keep
+        return keep, 0
     x1 = boxes[:, 0]
     y1 = boxes[:, 1]
     x2 = boxes[:, 2]
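
Why the zero-detection return changed: on the normal path nms returns the kept indices together with their count, and callers unpack both values, so returning the bare keep tensor for an image with no boxes broke that unpacking. A hedged sketch of the caller's view (nms_stub is a hypothetical stand-in, not the repository's full function):

import torch

def nms_stub(boxes, scores):
    keep = scores.new(scores.size(0)).zero_().long()
    if boxes.numel() == 0:
        return keep, 0          # fixed: always an (indices, count) pair
    raise NotImplementedError   # the usual IoU suppression loop goes here

ids, count = nms_stub(torch.empty(0, 4), torch.empty(0))
print(count)  # 0, and the two-value unpacking no longer fails on empty input
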
