2 changes: 2 additions & 0 deletions official/vision/detection/layers/det/anchor.py
@@ -67,6 +67,7 @@ class AnchorBoxGenerator(BaseAnchorGenerator):
offset (float): center point offset. default is 0.5.
"""

# pylint: disable=dangerous-default-value
def __init__(
self,
anchor_scales: list = [[32], [64], [128], [256], [512]],
@@ -135,6 +136,7 @@ class AnchorPointGenerator(BaseAnchorGenerator):
offset (float): center point offset. default is 0.5.
"""

# pylint: disable=dangerous-default-value
def __init__(
self,
num_anchors: int = 1,
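Note on the `dangerous-default-value` suppressions in this PR: they cover signatures whose defaults are mutable lists, such as `anchor_scales` in the anchor generators above, `reg_mean`/`reg_std` in `BoxCoder`, and `aspect_grouping` in the sampler builders further down. As a hedged aside (not part of this diff), the usual alternative to silencing the check is the `None`-sentinel pattern sketched below; the class and field names here are hypothetical.

```python
# Illustrative sketch only: avoiding pylint's dangerous-default-value
# warning with a None sentinel instead of a mutable list default.
from typing import List, Optional


class AnchorConfig:  # hypothetical stand-in, not a class from this PR
    def __init__(self, anchor_scales: Optional[List[List[int]]] = None):
        # A list default is created once and shared across all calls;
        # using None and building a fresh list per instance avoids that.
        if anchor_scales is None:
            anchor_scales = [[32], [64], [128], [256], [512]]
        self.anchor_scales = anchor_scales
```

The PR keeps the readable list defaults in the signatures and disables the check instead, which leaves all existing call sites untouched.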
1 change: 1 addition & 0 deletions official/vision/detection/layers/det/box_utils.py
@@ -31,6 +31,7 @@ def decode(self) -> Tensor:


class BoxCoder(BoxCoderBase, metaclass=ABCMeta):
# pylint: disable=dangerous-default-value
def __init__(
self,
reg_mean=[0.0, 0.0, 0.0, 0.0],
1 change: 1 addition & 0 deletions official/vision/detection/layers/det/fpn.py
@@ -40,6 +40,7 @@ class FPN(M.Module):
are produced by the backbone networks like ResNet.
"""

# pylint: disable=dangerous-default-value
def __init__(
self,
bottom_up: M.Module,
1 change: 1 addition & 0 deletions official/vision/detection/models/faster_rcnn.py
@@ -99,6 +99,7 @@ def inference(self, features, im_info):


class FasterRCNNConfig:
# pylint: disable=too-many-statements
def __init__(self):
self.backbone = "resnet50"
self.backbone_pretrained = True
2 changes: 1 addition & 1 deletion official/vision/detection/tools/test.py
@@ -47,7 +47,7 @@ def make_parser():


def main():
# pylint: disable=import-outside-toplevel
# pylint: disable=import-outside-toplevel,too-many-branches,too-many-statements
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

5 changes: 3 additions & 2 deletions official/vision/detection/tools/test_random.py
@@ -16,9 +16,9 @@
import megengine.distributed as dist
from megengine.data import DataLoader

from official.vision.detection.tools.data_mapper import data_mapper
from official.vision.detection.tools.utils import (
InferenceSampler,
PseudoDetectionDataset,
DetEvaluator,
import_from_file
)
@@ -47,7 +47,7 @@ def make_parser():


def main():
# pylint: disable=import-outside-toplevel
# pylint: disable=import-outside-toplevel,too-many-branches,too-many-statements
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

@@ -204,6 +204,7 @@ def worker(
result_list.append(result)


# pylint: disable=unused-argument
def build_dataloader(rank, world_size, dataset_dir, cfg):
val_dataset = PseudoDetectionDataset(length=5000, order=["image", "info"])
val_sampler = InferenceSampler(val_dataset, 1, world_size=world_size, rank=rank)
1 change: 1 addition & 0 deletions official/vision/detection/tools/train.py
@@ -233,6 +233,7 @@ def build_dataset(dataset_dir, cfg):
return data_mapper[data_name](**data_cfg)


# pylint: disable=dangerous-default-value
def build_sampler(train_dataset, batch_size, aspect_grouping=[1]):
def _compute_aspect_ratios(dataset):
aspect_ratios = []
6 changes: 3 additions & 3 deletions official/vision/detection/tools/train_random.py
@@ -8,7 +8,6 @@
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import argparse
import bisect
import copy
import multiprocessing as mp
import os
import time
@@ -21,7 +20,6 @@
# from megengine.jit import trace
from megengine.optimizer import SGD

from official.vision.detection.tools.data_mapper import data_mapper
from official.vision.detection.tools.utils import (
AverageMeter,
DetectionPadCollator,
@@ -220,10 +218,12 @@ def adjust_learning_rate(optimizer, epoch, step, cfg, args):
param_group["lr"] = base_lr * lr_factor


def build_dataset(*args):
# pylint: disable=unused-argument
def build_dataset(dataset_dir, cfg):
return PseudoDetectionDataset(order=["image", "boxes", "boxes_category", "info"])


# pylint: disable=dangerous-default-value
def build_sampler(train_dataset, batch_size, aspect_grouping=[1]):
def _compute_aspect_ratios(dataset):
aspect_ratios = []
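Replacing `build_dataset(*args)` with an explicit `build_dataset(dataset_dir, cfg)` keeps the smoke-test script call-compatible with the real training script while making the expected arguments visible; the `unused-argument` suppression records that the pseudo dataset never reads them. A hedged sketch of the same pattern, with hypothetical names not taken from this diff:

```python
# Sketch only: an explicit signature kept for call-site compatibility even
# though the synthetic implementation ignores its arguments.
def build_fake_dataset(dataset_dir, cfg):  # pylint: disable=unused-argument
    # dataset_dir and cfg mirror the real builder's interface; the fake
    # dataset is generated in memory and never touches the disk.
    return [{"image": None, "boxes": [], "boxes_category": [], "info": None}]
```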
3 changes: 3 additions & 0 deletions official/vision/detection/tools/utils.py
@@ -111,6 +111,9 @@ def __init__(self, dataset, batch_size=1, world_size=None, rank=None):
end = min(self.num_samples * (self.rank + 1), len(self.dataset))
self.indices = list(range(begin, end))

def sample(self):
pass

def batch(self):
step, length = self.batch_size, len(self.indices)
batch_index = [self.indices[i : i + step] for i in range(0, length, step)]
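The empty `sample()` added to `InferenceSampler` (here and in the segmentation `utils.py` below) presumably satisfies an abstract method on the sampler base class, since inference sampling is driven entirely by `batch()` over the precomputed `self.indices`. A rough sketch of that relationship, using a hypothetical base class rather than the real MegEngine one:

```python
# Sketch under assumptions: a hypothetical abstract sampler that requires
# sample(); the concrete inference sampler only yields whole batches of
# precomputed indices, so sample() is a deliberate no-op.
from abc import ABC, abstractmethod


class SamplerBase(ABC):  # hypothetical stand-in for the real base class
    @abstractmethod
    def sample(self):
        ...

    @abstractmethod
    def batch(self):
        ...


class RankLocalSampler(SamplerBase):  # hypothetical name
    def __init__(self, indices, batch_size=1):
        self.indices = list(indices)
        self.batch_size = batch_size

    def sample(self):
        # Required by the abstract base class; inference batching never
        # draws individual samples, so there is nothing to do here.
        pass

    def batch(self):
        step = self.batch_size
        return [self.indices[i:i + step] for i in range(0, len(self.indices), step)]
```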
1 change: 1 addition & 0 deletions official/vision/segmentation/tools/test.py
@@ -256,6 +256,7 @@ def compute_metric(result_list, cfg):
k = (gt >= 0) & (gt < num_classes)
labeled += np.sum(k)
correct += np.sum((pred[k] == gt[k]))
# pylint: disable=no-member
hist += np.bincount(
num_classes * gt[k].astype(int) + pred[k].astype(int),
minlength=num_classes ** 2
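The `no-member` suppression sits on the `np.bincount` accumulation, which is the standard trick for building a flattened confusion matrix: each labeled pixel falls into bin `num_classes * gt + pred`, and reshaping the counts to `(num_classes, num_classes)` gives the per-class hit matrix used for IoU. A small, self-contained illustration (the values are made up, not taken from the PR):

```python
# Standalone illustration of the bincount-based confusion matrix used for
# segmentation metrics.
import numpy as np

num_classes = 3
gt = np.array([0, 0, 1, 2, 2, 2])     # ground-truth labels
pred = np.array([0, 1, 1, 2, 2, 0])   # predicted labels

k = (gt >= 0) & (gt < num_classes)    # mask out ignored labels
hist = np.bincount(
    num_classes * gt[k].astype(int) + pred[k].astype(int),
    minlength=num_classes ** 2,
).reshape(num_classes, num_classes)   # rows: ground truth, cols: prediction

# Per-class IoU = diagonal / (row sum + column sum - diagonal)
iou = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
print(hist)
print(iou)
```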
1 change: 1 addition & 0 deletions official/vision/segmentation/tools/train.py
@@ -70,6 +70,7 @@ def main():
worker(None, None, 1, 0, args)


# pylint: disable=too-many-branches
def worker(master_ip, port, world_size, rank, args):
if world_size > 1:
dist.init_process_group(
3 changes: 3 additions & 0 deletions official/vision/segmentation/tools/utils.py
@@ -64,6 +64,9 @@ def __init__(self, dataset, batch_size=1, world_size=None, rank=None):
end = min(self.num_samples * (self.rank + 1), len(self.dataset))
self.indices = list(range(begin, end))

def sample(self):
pass

def batch(self):
step, length = self.batch_size, len(self.indices)
batch_index = [self.indices[i : i + step] for i in range(0, length, step)]