From 9a3f096195f91ad96f97c534cc59274e9f3e5c3a Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 17:28:54 +0200 Subject: [PATCH 1/8] Add logging to torchvision ops --- torchvision/ops/boxes.py | 9 +++++++++ torchvision/ops/deform_conv.py | 1 + torchvision/ops/feature_pyramid_network.py | 3 +++ torchvision/ops/focal_loss.py | 1 + torchvision/ops/misc.py | 5 +++++ torchvision/ops/poolers.py | 2 ++ torchvision/ops/ps_roi_align.py | 1 + torchvision/ops/ps_roi_pool.py | 1 + torchvision/ops/roi_align.py | 1 + torchvision/ops/roi_pool.py | 1 + torchvision/ops/stochastic_depth.py | 1 + 11 files changed, 26 insertions(+) diff --git a/torchvision/ops/boxes.py b/torchvision/ops/boxes.py index 8547383d34b..42f17c500d8 100644 --- a/torchvision/ops/boxes.py +++ b/torchvision/ops/boxes.py @@ -33,6 +33,7 @@ def nms(boxes: Tensor, scores: Tensor, iou_threshold: float) -> Tensor: Tensor: int64 tensor with the indices of the elements that have been kept by NMS, sorted in decreasing order of scores """ + torch._C._log_api_usage_once("torchvision.ops.nms") _assert_has_ops() return torch.ops.torchvision.nms(boxes, scores, iou_threshold) @@ -61,6 +62,7 @@ def batched_nms( Tensor: int64 tensor with the indices of the elements that have been kept by NMS, sorted in decreasing order of scores """ + torch._C._log_api_usage_once("torchvision.ops.batched_nms") # Benchmarks that drove the following thresholds are at # https://github.com/pytorch/vision/issues/1311#issuecomment-781329339 # Ideally for GPU we'd use a higher threshold @@ -120,6 +122,7 @@ def remove_small_boxes(boxes: Tensor, min_size: float) -> Tensor: Tensor[K]: indices of the boxes that have both sides larger than min_size """ + torch._C._log_api_usage_once("torchvision.ops.remove_small_boxes") ws, hs = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1] keep = (ws >= min_size) & (hs >= min_size) keep = torch.where(keep)[0] @@ -138,6 +141,7 @@ def clip_boxes_to_image(boxes: Tensor, size: Tuple[int, int]) -> Tensor: Returns: Tensor[N, 4]: clipped boxes """ + torch._C._log_api_usage_once("torchvision.ops.clip_boxes_to_image") dim = boxes.dim() boxes_x = boxes[..., 0::2] boxes_y = boxes[..., 1::2] @@ -178,6 +182,7 @@ def box_convert(boxes: Tensor, in_fmt: str, out_fmt: str) -> Tensor: Tensor[N, 4]: Boxes into converted format. 
""" + torch._C._log_api_usage_once("torchvision.ops.box_convert") allowed_fmts = ("xyxy", "xywh", "cxcywh") if in_fmt not in allowed_fmts or out_fmt not in allowed_fmts: raise ValueError("Unsupported Bounding Box Conversions for given in_fmt and out_fmt") @@ -227,6 +232,7 @@ def box_area(boxes: Tensor) -> Tensor: Returns: Tensor[N]: the area for each box """ + torch._C._log_api_usage_once("torchvision.ops.box_area") boxes = _upcast(boxes) return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) @@ -262,6 +268,7 @@ def box_iou(boxes1: Tensor, boxes2: Tensor) -> Tensor: Returns: Tensor[N, M]: the NxM matrix containing the pairwise IoU values for every element in boxes1 and boxes2 """ + torch._C._log_api_usage_once("torchvision.ops.box_iou") inter, union = _box_inter_union(boxes1, boxes2) iou = inter / union return iou @@ -284,6 +291,7 @@ def generalized_box_iou(boxes1: Tensor, boxes2: Tensor) -> Tensor: for every element in boxes1 and boxes2 """ + torch._C._log_api_usage_once("torchvision.ops.generalized_box_iou") # degenerate boxes gives inf / nan results # so do an early check assert (boxes1[:, 2:] >= boxes1[:, :2]).all() @@ -315,6 +323,7 @@ def masks_to_boxes(masks: torch.Tensor) -> torch.Tensor: Returns: Tensor[N, 4]: bounding boxes """ + torch._C._log_api_usage_once("torchvision.ops.masks_to_boxes") if masks.numel() == 0: return torch.zeros((0, 4), device=masks.device, dtype=torch.float) diff --git a/torchvision/ops/deform_conv.py b/torchvision/ops/deform_conv.py index 1f7c3ce40fe..57e7833de3b 100644 --- a/torchvision/ops/deform_conv.py +++ b/torchvision/ops/deform_conv.py @@ -59,6 +59,7 @@ def deform_conv2d( >>> torch.Size([4, 5, 8, 8]) """ + torch._C._log_api_usage_once("torchvision.ops.deform_conv2d") _assert_has_ops() out_channels = weight.shape[0] diff --git a/torchvision/ops/feature_pyramid_network.py b/torchvision/ops/feature_pyramid_network.py index 8b65bf7fc22..93caa47d04b 100644 --- a/torchvision/ops/feature_pyramid_network.py +++ b/torchvision/ops/feature_pyramid_network.py @@ -4,6 +4,8 @@ import torch.nn.functional as F from torch import nn, Tensor +from ..utils import _log_api_usage_once + class ExtraFPNBlock(nn.Module): """ @@ -75,6 +77,7 @@ def __init__( extra_blocks: Optional[ExtraFPNBlock] = None, ): super().__init__() + _log_api_usage_once(self) self.inner_blocks = nn.ModuleList() self.layer_blocks = nn.ModuleList() for in_channels in in_channels_list: diff --git a/torchvision/ops/focal_loss.py b/torchvision/ops/focal_loss.py index 3f72273c39c..b5f61482727 100644 --- a/torchvision/ops/focal_loss.py +++ b/torchvision/ops/focal_loss.py @@ -30,6 +30,7 @@ def sigmoid_focal_loss( Returns: Loss tensor with the reduction option applied. 
""" + torch._C._log_api_usage_once("torchvision.ops.sigmoid_focal_loss") p = torch.sigmoid(inputs) ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction="none") p_t = p * targets + (1 - p) * (1 - targets) diff --git a/torchvision/ops/misc.py b/torchvision/ops/misc.py index caf0d999f77..3c344a8c5ce 100644 --- a/torchvision/ops/misc.py +++ b/torchvision/ops/misc.py @@ -14,6 +14,8 @@ import torch from torch import Tensor +from ..utils import _log_api_usage_once + class Conv2d(torch.nn.Conv2d): def __init__(self, *args, **kwargs): @@ -66,6 +68,7 @@ def __init__( warnings.warn("`n` argument is deprecated and has been renamed `num_features`", DeprecationWarning) num_features = n super().__init__() + _log_api_usage_once(self) self.eps = eps self.register_buffer("weight", torch.ones(num_features)) self.register_buffer("bias", torch.zeros(num_features)) @@ -138,6 +141,7 @@ def __init__( if activation_layer is not None: layers.append(activation_layer(inplace=inplace)) super().__init__(*layers) + _log_api_usage_once(self) self.out_channels = out_channels @@ -150,6 +154,7 @@ def __init__( scale_activation: Callable[..., torch.nn.Module] = torch.nn.Sigmoid, ) -> None: super().__init__() + _log_api_usage_once(self) self.avgpool = torch.nn.AdaptiveAvgPool2d(1) self.fc1 = torch.nn.Conv2d(input_channels, squeeze_channels, 1) self.fc2 = torch.nn.Conv2d(squeeze_channels, input_channels, 1) diff --git a/torchvision/ops/poolers.py b/torchvision/ops/poolers.py index b3c4924f12e..87b2bd86e3e 100644 --- a/torchvision/ops/poolers.py +++ b/torchvision/ops/poolers.py @@ -6,6 +6,7 @@ from torchvision.ops.boxes import box_area from .roi_align import roi_align +from ..utils import _log_api_usage_once # copying result_idx_in_level to a specific index in result[] @@ -130,6 +131,7 @@ def __init__( canonical_level: int = 4, ): super().__init__() + _log_api_usage_once(self) if isinstance(output_size, int): output_size = (output_size, output_size) self.featmap_names = featmap_names diff --git a/torchvision/ops/ps_roi_align.py b/torchvision/ops/ps_roi_align.py index 264ae352483..5ac48fe987b 100644 --- a/torchvision/ops/ps_roi_align.py +++ b/torchvision/ops/ps_roi_align.py @@ -42,6 +42,7 @@ def ps_roi_align( Returns: Tensor[K, C / (output_size[0] * output_size[1]), output_size[0], output_size[1]]: The pooled RoIs """ + torch._C._log_api_usage_once("torchvision.ops.ps_roi_align") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/ps_roi_pool.py b/torchvision/ops/ps_roi_pool.py index cfc33b60428..c7727943e78 100644 --- a/torchvision/ops/ps_roi_pool.py +++ b/torchvision/ops/ps_roi_pool.py @@ -36,6 +36,7 @@ def ps_roi_pool( Returns: Tensor[K, C / (output_size[0] * output_size[1]), output_size[0], output_size[1]]: The pooled RoIs. """ + torch._C._log_api_usage_once("torchvision.ops.ps_roi_pool") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/roi_align.py b/torchvision/ops/roi_align.py index e686c1c5210..f95586bac14 100644 --- a/torchvision/ops/roi_align.py +++ b/torchvision/ops/roi_align.py @@ -49,6 +49,7 @@ def roi_align( Returns: Tensor[K, C, output_size[0], output_size[1]]: The pooled RoIs. 
""" + torch._C._log_api_usage_once("torchvision.ops.roi_align") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/roi_pool.py b/torchvision/ops/roi_pool.py index 6f8a37f4432..e32db499517 100644 --- a/torchvision/ops/roi_pool.py +++ b/torchvision/ops/roi_pool.py @@ -38,6 +38,7 @@ def roi_pool( Returns: Tensor[K, C, output_size[0], output_size[1]]: The pooled RoIs. """ + torch._C._log_api_usage_once("torchvision.ops.roi_pool") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/stochastic_depth.py b/torchvision/ops/stochastic_depth.py index dd8f4da6f8d..50e0a13a9dc 100644 --- a/torchvision/ops/stochastic_depth.py +++ b/torchvision/ops/stochastic_depth.py @@ -21,6 +21,7 @@ def stochastic_depth(input: Tensor, p: float, mode: str, training: bool = True) Returns: Tensor[N, ...]: The randomly zeroed tensor. """ + torch._C._log_api_usage_once("torchvision.ops.stochastic_depth") if p < 0.0 or p > 1.0: raise ValueError(f"drop probability has to be between 0 and 1, but got {p}") if mode not in ["batch", "row"]: From 8456af5233a3bad081d3e4b7f533d925c1026273 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 18:31:04 +0200 Subject: [PATCH 2/8] Hack to make torchscript work --- torchvision/ops/boxes.py | 19 ++++++++++--------- torchvision/ops/deform_conv.py | 4 +++- torchvision/ops/focal_loss.py | 4 +++- torchvision/ops/ps_roi_align.py | 4 ++-- torchvision/ops/ps_roi_pool.py | 4 ++-- torchvision/ops/roi_align.py | 4 ++-- torchvision/ops/roi_pool.py | 4 ++-- torchvision/ops/stochastic_depth.py | 4 +++- torchvision/utils.py | 7 ++++++- 9 files changed, 33 insertions(+), 21 deletions(-) diff --git a/torchvision/ops/boxes.py b/torchvision/ops/boxes.py index 42f17c500d8..07a7f19ba1e 100644 --- a/torchvision/ops/boxes.py +++ b/torchvision/ops/boxes.py @@ -6,6 +6,7 @@ from torchvision.extension import _assert_has_ops from ._box_convert import _box_cxcywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xywh_to_xyxy, _box_xyxy_to_xywh +from ..utils import _log_api_usage_once def nms(boxes: Tensor, scores: Tensor, iou_threshold: float) -> Tensor: @@ -33,7 +34,7 @@ def nms(boxes: Tensor, scores: Tensor, iou_threshold: float) -> Tensor: Tensor: int64 tensor with the indices of the elements that have been kept by NMS, sorted in decreasing order of scores """ - torch._C._log_api_usage_once("torchvision.ops.nms") + _log_api_usage_once("torchvision.ops.nms") _assert_has_ops() return torch.ops.torchvision.nms(boxes, scores, iou_threshold) @@ -62,7 +63,7 @@ def batched_nms( Tensor: int64 tensor with the indices of the elements that have been kept by NMS, sorted in decreasing order of scores """ - torch._C._log_api_usage_once("torchvision.ops.batched_nms") + _log_api_usage_once("torchvision.ops.batched_nms") # Benchmarks that drove the following thresholds are at # https://github.com/pytorch/vision/issues/1311#issuecomment-781329339 # Ideally for GPU we'd use a higher threshold @@ -122,7 +123,7 @@ def remove_small_boxes(boxes: Tensor, min_size: float) -> Tensor: Tensor[K]: indices of the boxes that have both sides larger than min_size """ - torch._C._log_api_usage_once("torchvision.ops.remove_small_boxes") + _log_api_usage_once("torchvision.ops.remove_small_boxes") ws, hs = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1] keep = (ws >= min_size) & (hs >= min_size) keep = torch.where(keep)[0] @@ -141,7 +142,7 @@ def clip_boxes_to_image(boxes: Tensor, size: Tuple[int, int]) -> Tensor: Returns: Tensor[N, 4]: clipped boxes """ - 
torch._C._log_api_usage_once("torchvision.ops.clip_boxes_to_image") + _log_api_usage_once("torchvision.ops.clip_boxes_to_image") dim = boxes.dim() boxes_x = boxes[..., 0::2] boxes_y = boxes[..., 1::2] @@ -182,7 +183,7 @@ def box_convert(boxes: Tensor, in_fmt: str, out_fmt: str) -> Tensor: Tensor[N, 4]: Boxes into converted format. """ - torch._C._log_api_usage_once("torchvision.ops.box_convert") + _log_api_usage_once("torchvision.ops.box_convert") allowed_fmts = ("xyxy", "xywh", "cxcywh") if in_fmt not in allowed_fmts or out_fmt not in allowed_fmts: raise ValueError("Unsupported Bounding Box Conversions for given in_fmt and out_fmt") @@ -232,7 +233,7 @@ def box_area(boxes: Tensor) -> Tensor: Returns: Tensor[N]: the area for each box """ - torch._C._log_api_usage_once("torchvision.ops.box_area") + _log_api_usage_once("torchvision.ops.box_area") boxes = _upcast(boxes) return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1]) @@ -268,7 +269,7 @@ def box_iou(boxes1: Tensor, boxes2: Tensor) -> Tensor: Returns: Tensor[N, M]: the NxM matrix containing the pairwise IoU values for every element in boxes1 and boxes2 """ - torch._C._log_api_usage_once("torchvision.ops.box_iou") + _log_api_usage_once("torchvision.ops.box_iou") inter, union = _box_inter_union(boxes1, boxes2) iou = inter / union return iou @@ -291,7 +292,7 @@ def generalized_box_iou(boxes1: Tensor, boxes2: Tensor) -> Tensor: for every element in boxes1 and boxes2 """ - torch._C._log_api_usage_once("torchvision.ops.generalized_box_iou") + _log_api_usage_once("torchvision.ops.generalized_box_iou") # degenerate boxes gives inf / nan results # so do an early check assert (boxes1[:, 2:] >= boxes1[:, :2]).all() @@ -323,7 +324,7 @@ def masks_to_boxes(masks: torch.Tensor) -> torch.Tensor: Returns: Tensor[N, 4]: bounding boxes """ - torch._C._log_api_usage_once("torchvision.ops.masks_to_boxes") + _log_api_usage_once("torchvision.ops.masks_to_boxes") if masks.numel() == 0: return torch.zeros((0, 4), device=masks.device, dtype=torch.float) diff --git a/torchvision/ops/deform_conv.py b/torchvision/ops/deform_conv.py index 57e7833de3b..f64621424b1 100644 --- a/torchvision/ops/deform_conv.py +++ b/torchvision/ops/deform_conv.py @@ -8,6 +8,8 @@ from torch.nn.parameter import Parameter from torchvision.extension import _assert_has_ops +from ..utils import _log_api_usage_once + def deform_conv2d( input: Tensor, @@ -59,7 +61,7 @@ def deform_conv2d( >>> torch.Size([4, 5, 8, 8]) """ - torch._C._log_api_usage_once("torchvision.ops.deform_conv2d") + _log_api_usage_once("torchvision.ops.deform_conv2d") _assert_has_ops() out_channels = weight.shape[0] diff --git a/torchvision/ops/focal_loss.py b/torchvision/ops/focal_loss.py index b5f61482727..1a149ed4120 100644 --- a/torchvision/ops/focal_loss.py +++ b/torchvision/ops/focal_loss.py @@ -1,6 +1,8 @@ import torch import torch.nn.functional as F +from ..utils import _log_api_usage_once + def sigmoid_focal_loss( inputs: torch.Tensor, @@ -30,7 +32,7 @@ def sigmoid_focal_loss( Returns: Loss tensor with the reduction option applied. 
""" - torch._C._log_api_usage_once("torchvision.ops.sigmoid_focal_loss") + _log_api_usage_once("torchvision.ops.sigmoid_focal_loss") p = torch.sigmoid(inputs) ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction="none") p_t = p * targets + (1 - p) * (1 - targets) diff --git a/torchvision/ops/ps_roi_align.py b/torchvision/ops/ps_roi_align.py index 5ac48fe987b..3d4402c23dd 100644 --- a/torchvision/ops/ps_roi_align.py +++ b/torchvision/ops/ps_roi_align.py @@ -3,7 +3,7 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once def ps_roi_align( @@ -42,7 +42,7 @@ def ps_roi_align( Returns: Tensor[K, C / (output_size[0] * output_size[1]), output_size[0], output_size[1]]: The pooled RoIs """ - torch._C._log_api_usage_once("torchvision.ops.ps_roi_align") + _log_api_usage_once("torchvision.ops.ps_roi_align") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/ps_roi_pool.py b/torchvision/ops/ps_roi_pool.py index c7727943e78..565ce3e8e11 100644 --- a/torchvision/ops/ps_roi_pool.py +++ b/torchvision/ops/ps_roi_pool.py @@ -3,7 +3,7 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once def ps_roi_pool( @@ -36,7 +36,7 @@ def ps_roi_pool( Returns: Tensor[K, C / (output_size[0] * output_size[1]), output_size[0], output_size[1]]: The pooled RoIs. """ - torch._C._log_api_usage_once("torchvision.ops.ps_roi_pool") + _log_api_usage_once("torchvision.ops.ps_roi_pool") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/roi_align.py b/torchvision/ops/roi_align.py index f95586bac14..abdcb69388d 100644 --- a/torchvision/ops/roi_align.py +++ b/torchvision/ops/roi_align.py @@ -6,7 +6,7 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once def roi_align( @@ -49,7 +49,7 @@ def roi_align( Returns: Tensor[K, C, output_size[0], output_size[1]]: The pooled RoIs. """ - torch._C._log_api_usage_once("torchvision.ops.roi_align") + _log_api_usage_once("torchvision.ops.roi_align") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/roi_pool.py b/torchvision/ops/roi_pool.py index e32db499517..ae156d166ba 100644 --- a/torchvision/ops/roi_pool.py +++ b/torchvision/ops/roi_pool.py @@ -6,7 +6,7 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once def roi_pool( @@ -38,7 +38,7 @@ def roi_pool( Returns: Tensor[K, C, output_size[0], output_size[1]]: The pooled RoIs. 
""" - torch._C._log_api_usage_once("torchvision.ops.roi_pool") + _log_api_usage_once("torchvision.ops.roi_pool") _assert_has_ops() check_roi_boxes_shape(boxes) rois = boxes diff --git a/torchvision/ops/stochastic_depth.py b/torchvision/ops/stochastic_depth.py index 50e0a13a9dc..b2a0aec233d 100644 --- a/torchvision/ops/stochastic_depth.py +++ b/torchvision/ops/stochastic_depth.py @@ -2,6 +2,8 @@ import torch.fx from torch import nn, Tensor +from ..utils import _log_api_usage_once + def stochastic_depth(input: Tensor, p: float, mode: str, training: bool = True) -> Tensor: """ @@ -21,7 +23,7 @@ def stochastic_depth(input: Tensor, p: float, mode: str, training: bool = True) Returns: Tensor[N, ...]: The randomly zeroed tensor. """ - torch._C._log_api_usage_once("torchvision.ops.stochastic_depth") + _log_api_usage_once("torchvision.ops.stochastic_depth") if p < 0.0 or p > 1.0: raise ValueError(f"drop probability has to be between 0 and 1, but got {p}") if mode not in ["batch", "row"]: diff --git a/torchvision/utils.py b/torchvision/utils.py index 399dc3fcc5a..37c570bdd2c 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -306,4 +306,9 @@ def _generate_color_palette(num_masks: int): def _log_api_usage_once(obj: object) -> None: - torch._C._log_api_usage_once(f"{obj.__module__}.{obj.__class__.__name__}") + if torch.jit.is_scripting(): + return + if isinstance(obj, str): + torch._C._log_api_usage_once(obj) + else: + torch._C._log_api_usage_once(f"{obj.__module__}.{obj.__class__.__name__}") From b82a6ca63efe3b80d072b699e9898a0dde4efd9f Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 19:14:02 +0200 Subject: [PATCH 3/8] Bugfix --- torchvision/ops/ps_roi_align.py | 3 ++- torchvision/ops/ps_roi_pool.py | 3 ++- torchvision/ops/roi_align.py | 3 ++- torchvision/ops/roi_pool.py | 3 ++- torchvision/utils.py | 7 ++++--- 5 files changed, 12 insertions(+), 7 deletions(-) diff --git a/torchvision/ops/ps_roi_align.py b/torchvision/ops/ps_roi_align.py index 3d4402c23dd..2fea1c0330d 100644 --- a/torchvision/ops/ps_roi_align.py +++ b/torchvision/ops/ps_roi_align.py @@ -3,7 +3,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ..utils import _log_api_usage_once def ps_roi_align( diff --git a/torchvision/ops/ps_roi_pool.py b/torchvision/ops/ps_roi_pool.py index 565ce3e8e11..b1d38c53162 100644 --- a/torchvision/ops/ps_roi_pool.py +++ b/torchvision/ops/ps_roi_pool.py @@ -3,7 +3,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ..utils import _log_api_usage_once def ps_roi_pool( diff --git a/torchvision/ops/roi_align.py b/torchvision/ops/roi_align.py index abdcb69388d..5401c960f55 100644 --- a/torchvision/ops/roi_align.py +++ b/torchvision/ops/roi_align.py @@ -6,7 +6,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ..utils import _log_api_usage_once def roi_align( diff --git a/torchvision/ops/roi_pool.py b/torchvision/ops/roi_pool.py index 
ae156d166ba..f2d29e5fd37 100644 --- a/torchvision/ops/roi_pool.py +++ b/torchvision/ops/roi_pool.py @@ -6,7 +6,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape, _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape +from ..utils import _log_api_usage_once def roi_pool( diff --git a/torchvision/utils.py b/torchvision/utils.py index 37c570bdd2c..2b0899f033c 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -305,9 +305,10 @@ def _generate_color_palette(num_masks: int): return [tuple((i * palette) % 255) for i in range(num_masks)] -def _log_api_usage_once(obj: object) -> None: - if torch.jit.is_scripting(): - return +@torch.jit.ignore +def _log_api_usage_once(obj: str) -> None: + # NOTE: obj can be an object as well, but mocking it here to be + # only a string to appease torchscript if isinstance(obj, str): torch._C._log_api_usage_once(obj) else: From 8224b1d52ec63f9df2812f10c7d343d857200e35 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 19:49:44 +0200 Subject: [PATCH 4/8] Bugfix --- torchvision/ops/boxes.py | 2 +- torchvision/ops/ps_roi_align.py | 2 +- torchvision/ops/ps_roi_pool.py | 2 +- torchvision/ops/roi_align.py | 2 +- torchvision/ops/roi_pool.py | 2 +- torchvision/utils.py | 3 ++- 6 files changed, 7 insertions(+), 6 deletions(-) diff --git a/torchvision/ops/boxes.py b/torchvision/ops/boxes.py index 07a7f19ba1e..10a03a907e8 100644 --- a/torchvision/ops/boxes.py +++ b/torchvision/ops/boxes.py @@ -5,8 +5,8 @@ from torch import Tensor from torchvision.extension import _assert_has_ops -from ._box_convert import _box_cxcywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xywh_to_xyxy, _box_xyxy_to_xywh from ..utils import _log_api_usage_once +from ._box_convert import _box_cxcywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xywh_to_xyxy, _box_xyxy_to_xywh def nms(boxes: Tensor, scores: Tensor, iou_threshold: float) -> Tensor: diff --git a/torchvision/ops/ps_roi_align.py b/torchvision/ops/ps_roi_align.py index 2fea1c0330d..4ed4ead89ff 100644 --- a/torchvision/ops/ps_roi_align.py +++ b/torchvision/ops/ps_roi_align.py @@ -3,8 +3,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape from ..utils import _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape def ps_roi_align( diff --git a/torchvision/ops/ps_roi_pool.py b/torchvision/ops/ps_roi_pool.py index b1d38c53162..6bab125f04f 100644 --- a/torchvision/ops/ps_roi_pool.py +++ b/torchvision/ops/ps_roi_pool.py @@ -3,8 +3,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape from ..utils import _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape def ps_roi_pool( diff --git a/torchvision/ops/roi_align.py b/torchvision/ops/roi_align.py index 5401c960f55..3f80383855b 100644 --- a/torchvision/ops/roi_align.py +++ b/torchvision/ops/roi_align.py @@ -6,8 +6,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape from ..utils import _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape def roi_align( diff --git a/torchvision/ops/roi_pool.py 
b/torchvision/ops/roi_pool.py index f2d29e5fd37..deef590c953 100644 --- a/torchvision/ops/roi_pool.py +++ b/torchvision/ops/roi_pool.py @@ -6,8 +6,8 @@ from torch.nn.modules.utils import _pair from torchvision.extension import _assert_has_ops -from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape from ..utils import _log_api_usage_once +from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape def roi_pool( diff --git a/torchvision/utils.py b/torchvision/utils.py index 2b0899f033c..bdf2bdabcd8 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -305,8 +305,9 @@ def _generate_color_palette(num_masks: int): return [tuple((i * palette) % 255) for i in range(num_masks)] -@torch.jit.ignore def _log_api_usage_once(obj: str) -> None: + if torch.jit.is_scripting() or torch.jit.is_tracing(): + return # NOTE: obj can be an object as well, but mocking it here to be # only a string to appease torchscript if isinstance(obj, str): From c37078441acab43072c34e5a79136aa42f28c561 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 19:52:26 +0200 Subject: [PATCH 5/8] Lint --- torchvision/ops/poolers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/ops/poolers.py b/torchvision/ops/poolers.py index 87b2bd86e3e..c22012a143f 100644 --- a/torchvision/ops/poolers.py +++ b/torchvision/ops/poolers.py @@ -5,8 +5,8 @@ from torch import nn, Tensor from torchvision.ops.boxes import box_area -from .roi_align import roi_align from ..utils import _log_api_usage_once +from .roi_align import roi_align # copying result_idx_in_level to a specific index in result[] From e5b3d8899dfa616b824b3955f3b2573a1d46b965 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 20:00:46 +0200 Subject: [PATCH 6/8] mypy... 
let's silence it --- torchvision/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/utils.py b/torchvision/utils.py index bdf2bdabcd8..7c1e1d6e2f7 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -305,7 +305,7 @@ def _generate_color_palette(num_masks: int): return [tuple((i * palette) % 255) for i in range(num_masks)] -def _log_api_usage_once(obj: str) -> None: +def _log_api_usage_once(obj: str) -> None: # type: ignore if torch.jit.is_scripting() or torch.jit.is_tracing(): return # NOTE: obj can be an object as well, but mocking it here to be From d0af9fb2bec6c3e1608b2beb6eaa607daba979b0 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 20:09:58 +0200 Subject: [PATCH 7/8] Fighting with mymy --- torchvision/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/utils.py b/torchvision/utils.py index 7c1e1d6e2f7..60fa949aa2f 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -305,7 +305,7 @@ def _generate_color_palette(num_masks: int): return [tuple((i * palette) % 255) for i in range(num_masks)] -def _log_api_usage_once(obj: str) -> None: # type: ignore +def _log_api_usage_once(obj) -> None: # type: ignore if torch.jit.is_scripting() or torch.jit.is_tracing(): return # NOTE: obj can be an object as well, but mocking it here to be From 693ea250b872042c45545f7fa8bbc69cf958cd22 Mon Sep 17 00:00:00 2001 From: Francisco Massa Date: Fri, 29 Oct 2021 20:32:32 +0200 Subject: [PATCH 8/8] One more try --- torchvision/utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/torchvision/utils.py b/torchvision/utils.py index 60fa949aa2f..9dd3c9c5ab3 100644 --- a/torchvision/utils.py +++ b/torchvision/utils.py @@ -1,7 +1,7 @@ import math import pathlib import warnings -from typing import Union, Optional, List, Tuple, BinaryIO +from typing import Union, Optional, List, Tuple, BinaryIO, no_type_check import numpy as np import torch @@ -305,7 +305,8 @@ def _generate_color_palette(num_masks: int): return [tuple((i * palette) % 255) for i in range(num_masks)] -def _log_api_usage_once(obj) -> None: # type: ignore +@no_type_check +def _log_api_usage_once(obj: str) -> None: # type: ignore if torch.jit.is_scripting() or torch.jit.is_tracing(): return # NOTE: obj can be an object as well, but mocking it here to be
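
The end state of the series is a single helper in torchvision/utils.py plus two call patterns in the ops. Below is a minimal sketch reconstructed from the hunks above; since the last hunk of PATCH 8/8 is cut off, the trailing isinstance branch is carried over from the PATCH 2/8-4/8 versions of the same function, and the example op/module names at the bottom are hypothetical, added only to illustrate the two call sites introduced by PATCH 1/8.

    # Sketch of torchvision/utils.py after PATCH 8/8 (reconstruction, not the verbatim file).
    from typing import no_type_check

    import torch


    @no_type_check
    def _log_api_usage_once(obj: str) -> None:  # type: ignore
        # Skip logging under torchscript scripting/tracing, where the
        # torch._C binding cannot be compiled.
        if torch.jit.is_scripting() or torch.jit.is_tracing():
            return
        # NOTE: obj can be an object as well, but it is typed as a string
        # here to appease torchscript.
        if isinstance(obj, str):
            # Functional ops pass an explicit event name, e.g. "torchvision.ops.nms".
            torch._C._log_api_usage_once(obj)
        else:
            # nn.Module subclasses pass `self`; the event name is derived from the class.
            torch._C._log_api_usage_once(f"{obj.__module__}.{obj.__class__.__name__}")


    # The two call patterns used throughout the ops (hypothetical toy examples):
    def my_functional_op(x: torch.Tensor) -> torch.Tensor:
        _log_api_usage_once("torchvision.ops.my_functional_op")  # free function: pass a string
        return x


    class MyModule(torch.nn.Module):
        def __init__(self) -> None:
            super().__init__()
            _log_api_usage_once(self)  # module: pass the instance right after super().__init__()

In this sketch the scripting/tracing guard is what replaced the earlier `@torch.jit.ignore` attempt from PATCH 3/8, and `@no_type_check` is what finally quieted mypy about the string-typed parameter that can also receive an object.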