Skip to content

Commit

Permalink
lint
Browse files Browse the repository at this point in the history
  • Loading branch information
NicolasHug committed Oct 30, 2023
1 parent dea1dbe commit 65cd14a
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 3 deletions.
1 change: 0 additions & 1 deletion test/test_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,6 @@ def func(z):
# NOTE(review): this span is a diff hunk from test/test_ops.py as rendered by
# GitHub; the scrape dropped the original indentation, restored here
# conventionally. The @pytest.mark.opcheck_only_one() line is shown by the
# diff as REMOVED by this commit ("lint", 65cd14a).
@needs_cuda
@pytest.mark.parametrize("x_dtype", (torch.float, torch.half))
@pytest.mark.parametrize("rois_dtype", (torch.float, torch.half))
@pytest.mark.opcheck_only_one()
def test_autocast(self, x_dtype, rois_dtype):
    """Run the forward test under CUDA AMP autocast with mixed dtypes.

    Parametrized over float32/float16 for both the input tensor dtype and
    the RoIs dtype. Delegates to ``self.test_forward`` (defined elsewhere
    in the enclosing test class — not visible in this hunk), forcing a
    CUDA device and non-contiguous inputs.
    """
    # Everything inside the context runs with autocast enabled, so the
    # half-precision parametrizations exercise the op's autocast handling.
    with torch.cuda.amp.autocast():
        self.test_forward(torch.device("cuda"), contiguous=False, x_dtype=x_dtype, rois_dtype=rois_dtype)
Expand Down
3 changes: 1 addition & 2 deletions torchvision/_meta_registrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,6 @@ def meta_ps_roi_pool_backward(
return grad.new_empty((batch_size, channels, height, width))



@torch._custom_ops.impl_abstract("torchvision::nms")
def meta_nms(dets, scores, iou_threshold):
torch._check(dets.dim() == 2, lambda: f"boxes should be a 2d tensor, got {dets.dim()}D")
Expand All @@ -172,4 +171,4 @@ def meta_nms(dets, scores, iou_threshold):
)
ctx = torch._custom_ops.get_ctx()
num_to_keep = ctx.create_unbacked_symint()
return dets.new_empty(num_to_keep, dtype=torch.long)
return dets.new_empty(num_to_keep, dtype=torch.long)

0 comments on commit 65cd14a

Please sign in to comment.