Commit

[Enhancement] Upgrade isort in pre-commit hook (#44)
* [Docs] update batch size

* [Enhancement] Upgrade isort in pre-commit hook

* update mmdet version

* update mmcls version

* update ci
linyq17 committed Feb 18, 2022
1 parent 1ad6ff7 commit 839115f
Showing 6 changed files with 17 additions and 18 deletions.
8 changes: 2 additions & 6 deletions .pre-commit-config.yaml
@@ -3,12 +3,8 @@ repos:
     rev: 3.8.3
     hooks:
       - id: flake8
-  - repo: https://github.com/asottile/seed-isort-config
-    rev: v2.2.0
-    hooks:
-      - id: seed-isort-config
-  - repo: https://github.com/timothycrosley/isort
-    rev: 4.3.21
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.10.1
     hooks:
       - id: isort
   - repo: https://github.com/pre-commit/mirrors-yapf
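
isort 5 resolves import sections on its own, which is why the seed-isort-config hook can be dropped while moving to the maintained PyCQA repository. A minimal sketch of that behaviour, assuming isort>=5 is installed (module names chosen purely for illustration):

import isort

# isort>=5 classifies a module into its section itself, replacing the
# known_third_party list that seed-isort-config used to generate.
print(isort.place_module('os'))      # 'STDLIB'
print(isort.place_module('numpy'))   # typically 'THIRDPARTY' when installed

# The same API sorts a snippet programmatically, mirroring what the
# pre-commit hook does to staged files.
print(isort.code('import sys\nimport re\n'))   # 'import re\nimport sys\n'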
4 changes: 2 additions & 2 deletions mmfewshot/__init__.py
@@ -32,7 +32,7 @@ def digit_version(version_str):
     f'Please install mmcv>={mmcv_minimum_version}, <={mmcv_maximum_version}.'
 
 mmdet_minimum_version = '2.16.0'
-mmdet_maximum_version = '2.20.0'
+mmdet_maximum_version = '2.21.0'
 mmdet_version = digit_version(mmdet.__version__)
 
 
@@ -43,7 +43,7 @@ def digit_version(version_str):
     <={mmdet_maximum_version}.'
 
 mmcls_minimum_version = '0.15.0'
-mmcls_maximum_version = '0.19.0'
+mmcls_maximum_version = '0.21.0'
 mmcls_version = digit_version(mmcls.__version__)
 
 
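
The widened pins feed the runtime guard in mmfewshot/__init__.py. A minimal sketch of that version-check pattern, with a simplified digit_version and a stand-in installed version (the real helper may treat suffixes such as release candidates differently):

def digit_version(version_str):
    # Convert '2.21.0' into a comparable tuple such as (2, 21, 0).
    return tuple(int(p) for p in version_str.split('.') if p.isdigit())


mmdet_minimum_version = '2.16.0'
mmdet_maximum_version = '2.21.0'  # raised from '2.20.0' by this commit

installed = '2.21.0'  # stand-in for mmdet.__version__
assert (digit_version(mmdet_minimum_version)
        <= digit_version(installed)
        <= digit_version(mmdet_maximum_version)), (
    f'Please install mmdet>={mmdet_minimum_version}, '
    f'<={mmdet_maximum_version}.')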
10 changes: 6 additions & 4 deletions mmfewshot/detection/core/evaluation/eval_hooks.py
@@ -39,8 +39,9 @@ def _do_evaluate(self, runner: Runner) -> None:
         # identified the 'train', 'val' and 'model_init' stages instead
         # of `return_loss` in mmdet. Thus, `single_gpu_test` should be
         # imported from mmfewshot.
-        from mmfewshot.detection.apis import \
-            (single_gpu_model_init, single_gpu_test)
+        from mmfewshot.detection.apis import (single_gpu_model_init,
+                                              single_gpu_test)
+
         # `single_gpu_model_init` extracts features from
         # `model_init_dataloader` for model initialization with single gpu.
         single_gpu_model_init(runner.model, self.model_init_dataloader)
@@ -102,8 +103,9 @@ def _do_evaluate(self, runner: Runner) -> None:
         # identified the 'train', 'val' and 'model_init' stages instead
         # of `return_loss` in mmdet. Thus, `multi_gpu_test` should be
         # imported from mmfewshot.
-        from mmfewshot.detection.apis import \
-            (multi_gpu_model_init, multi_gpu_test)
+        from mmfewshot.detection.apis import (multi_gpu_model_init,
+                                              multi_gpu_test)
+
         # Noted that `model_init_dataloader` should NOT use distributed
         # sampler to make all the models on different gpus get same data
         # results in the same initialized models.
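
Both hunks above are pure isort 5 reformatting: a backslash-continued import becomes a single parenthesized import wrapped at the configured width. A self-contained illustration with standard-library names, assuming the line_length = 79 and multi_line_output = 0 settings from setup.cfg (the project's own imports would additionally require mmfewshot to be installed):

# Style the upgraded hook rewrites (backslash continuation):
# from os.path import \
#     (abspath, basename, dirname, exists, getsize, isdir, isfile, join)

# Style isort 5 produces with multi_line_output = 0 (grid, parentheses):
from os.path import (abspath, basename, dirname, exists, getsize, isdir,
                     isfile, join)

print(exists(join(dirname(abspath(__file__)), basename(__file__))))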
9 changes: 5 additions & 4 deletions mmfewshot/detection/datasets/builder.py
@@ -189,6 +189,7 @@ def build_dataloader(dataset: Dataset,
         seed=seed) if seed is not None else None
     if isinstance(dataset, QueryAwareDataset):
         from mmfewshot.utils import multi_pipeline_collate_fn
+
         # `QueryAwareDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]
@@ -203,12 +204,12 @@ def build_dataloader(dataset: Dataset,
             worker_init_fn=init_fn,
             **kwargs)
     elif isinstance(dataset, NWayKShotDataset):
-        from .dataloader_wrappers import NWayKShotDataloader
         from mmfewshot.utils import multi_pipeline_collate_fn
+        from .dataloader_wrappers import NWayKShotDataloader
+
         # `NWayKShotDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]
-
         # initialize query dataloader
         query_data_loader = DataLoader(
             dataset,
@@ -260,12 +261,12 @@ def build_dataloader(dataset: Dataset,
             query_data_loader=query_data_loader,
             support_data_loader=support_data_loader)
     elif isinstance(dataset, TwoBranchDataset):
-        from .dataloader_wrappers import TwoBranchDataloader
         from mmfewshot.utils import multi_pipeline_collate_fn
+        from .dataloader_wrappers import TwoBranchDataloader
+
         # `TwoBranchDataset` will return a list of DataContainer
         # `multi_pipeline_collate_fn` are designed to handle
         # the data with list[list[DataContainer]]
-
         # initialize main dataloader
         main_data_loader = DataLoader(
             dataset,
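
The reordering in the last two hunks follows the section order isort 5 derives from setup.cfg: standard library, third party, the first-party mmfewshot package, then local relative imports. A small sketch of that ordering using names from the hunks above (assumes numpy and mmfewshot are importable; the relative import is left commented because it only resolves inside the package):

import os  # standard library

import numpy  # third party (listed in known_third_party)

from mmfewshot.utils import multi_pipeline_collate_fn  # first party

# from .dataloader_wrappers import NWayKShotDataloader  # local folder, sorted last

print(os.getcwd(), numpy.__version__, multi_pipeline_collate_fn.__name__)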
2 changes: 1 addition & 1 deletion setup.cfg
@@ -1,7 +1,7 @@
 [isort]
 line_length = 79
 multi_line_output = 0
-known_standard_library = setuptools
+extra_standard_library = setuptools
 known_first_party = mmfewshot
 known_third_party = cv2,mmcls,mmcv,mmdet,numpy,pytest,pytorch_sphinx_theme,terminaltables,torch,typing_extensions
 
2 changes: 1 addition & 1 deletion setup.py
@@ -37,9 +37,9 @@ def parse_requirements(fname='requirements.txt', with_version=True):
     CommandLine:
         python -c "import setup; print(setup.parse_requirements())"
     """
+    import re
     import sys
     from os.path import exists
-    import re
     require_fpath = fname
 
     def parse_line(line):
