
Commit

Merge ba916e2 into 789330e
HAOCHENYE committed Mar 14, 2023
2 parents 789330e + ba916e2 commit f50d3b9
Showing 3 changed files with 76 additions and 7 deletions.
63 changes: 62 additions & 1 deletion .circleci/test.yml
@@ -134,6 +134,57 @@ jobs:
          command: |
            docker exec mmengine python -m pytest tests/
  build_integration_test:
    parameters:
      torch:
        type: string
      cuda:
        type: string
      cudnn:
        type: integer
        default: 7
    machine:
      image: ubuntu-2004-cuda-11.4:202110-01
      docker_layer_caching: true
    resource_class: gpu.nvidia.small
    steps:
      - checkout
      - run:
          name: Build Docker image
          command: |
            docker build .circleci/docker -t mmengine:gpu --build-arg PYTORCH=<< parameters.torch >> --build-arg CUDA=<< parameters.cuda >> --build-arg CUDNN=<< parameters.cudnn >>
            docker run --gpus all -t -d -v /home/circleci/project:/mmengine -w /mmengine --name mmengine mmengine:gpu
      - run:
          name: Build MMEngine from source
          command: |
            docker exec mmengine pip install -e . -v
      - run:
          name: Install unit tests dependencies
          command: |
            docker exec mmengine pip install -r requirements/tests.txt
            docker exec mmengine pip install openmim
            docker exec mmengine mim install 'mmcv>=2.0.0rc1'
      - run:
          name: Install downstream repositories
          command: |
            docker exec mmengine mim install 'mmdet>=3.0.0rc0'
      - run:
          name: Run integration tests
          command: |
            docker exec mmengine pytest tests/test_infer/test_infer.py
      - run:
          name: Install downstream repositories from source
          # TODO: Switch to master branch
          command: |
            docker exec mmengine pip uninstall mmdet -y
            docker exec mmengine apt install git -y
            docker exec mmengine mkdir downstream_repos
            docker exec mmengine git clone -b 3.x https://github.com/open-mmlab/mmdetection.git ./downstream_repos/mmdetection
      - run:
          name: Run inferencer tests
          command: |
            docker exec -e PYTHONPATH=./downstream_repos/mmdetection mmengine pytest tests/test_infer/test_infer.py
workflows:
pr_stage_lint:
when: << pipeline.parameters.lint_only >>
@@ -173,10 +224,20 @@ workflows:
          python: 3.9.0
          requires:
            - minimum_version_cpu
      - hold_integration_test:
          type: approval
          requires:
            - lint
      - build_integration_test:
          name: integration_test
          torch: 1.8.1
          cuda: "10.2"
          requires:
            - hold_integration_test
      - hold:
          type: approval
          requires:
            - maximum_version_cpu
            - lint
      - build_cuda:
          name: mainstream_version_gpu
          torch: 1.8.1
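
The last two steps of the new build_integration_test job uninstall the mim-installed mmdet and rerun the inferencer tests against a source checkout of mmdetection that is only reachable through PYTHONPATH. A rough local equivalent of that final step, outside Docker (a hedged sketch, not part of the commit; paths assume the same downstream_repos layout):

import os
import subprocess

# With mmdet removed from site-packages, the PYTHONPATH entry is what lets
# `import mmdet` resolve to the cloned 3.x source tree during the test run.
env = dict(os.environ, PYTHONPATH='./downstream_repos/mmdetection')
subprocess.run(
    ['pytest', 'tests/test_infer/test_infer.py'],
    env=env,
    check=True,
)
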
4 changes: 2 additions & 2 deletions mmengine/infer/infer.py
@@ -419,9 +419,9 @@ def _get_repo_or_mim_dir(scope):
        return repo_dir
    else:
        mim_dir = osp.join(package_path, '.mim')
        if not osp.exists(osp.join(mim_dir, 'Configs')):
        if not osp.exists(osp.join(mim_dir, 'configs')):
            raise FileNotFoundError(
                f'Cannot find Configs directory in {package_path}!, '
                f'Cannot find `configs` directory in {package_path}!, '
                f'please check the completeness of the {scope}.')
        return mim_dir

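
The infer.py change above only flips the case of the probed directory, matching the lowercase configs layout that mim-packaged metadata uses under .mim. A standalone restatement of the corrected check (the function name is illustrative, not from the diff):

import os.path as osp


def find_mim_configs_dir(package_path, scope):
    # Mirrors the fixed lookup: the directory must be matched as lowercase
    # `configs`, which is how mim lays out the packaged config files.
    mim_dir = osp.join(package_path, '.mim')
    if not osp.exists(osp.join(mim_dir, 'configs')):
        raise FileNotFoundError(
            f'Cannot find `configs` directory in {package_path}, '
            f'please check the completeness of the {scope}.')
    return mim_dir
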
16 changes: 12 additions & 4 deletions tests/test_infer/test_infer.py
@@ -10,10 +10,18 @@
from mmengine.infer import BaseInferencer
from mmengine.registry import VISUALIZERS, DefaultScope
from mmengine.testing import RunnerTestCase
from mmengine.utils import is_installed, is_list_of
from mmengine.utils import is_list_of
from mmengine.visualization import Visualizer


def is_imported(package):
    try:
        __import__(package)
        return True
    except ImportError:
        return False


class ToyInferencer(BaseInferencer):
preprocess_kwargs = {'pre_arg'}
forward_kwargs = {'for_arg'}
@@ -98,7 +106,7 @@ def test_init(self):
            ToyInferencer([self.epoch_based_cfg], self.ckpt_path)

        # Pass model as model name defined in metafile
        if is_installed('mmdet'):
        if is_imported('mmdet'):
            from mmdet.utils import register_all_modules

            register_all_modules()
@@ -126,7 +134,7 @@ def test_call(self):
        inferencer(img_paths)

    @pytest.mark.skipif(
        not is_installed('mmdet'), reason='mmdet is not installed')
        not is_imported('mmdet'), reason='mmdet is not installed')
    def test_load_model_from_meta(self):
        from mmdet.utils import register_all_modules

@@ -210,7 +218,7 @@ def test_preprocess(self):
        self.assertTrue(is_list_of(data, torch.Tensor))

    @pytest.mark.skipif(
        not is_installed('mmdet'), reason='mmdet is not installed')
        not is_imported('mmdet'), reason='mmdet is not installed')
    def test_list_models(self):
        model_list = BaseInferencer.list_models('mmdet')
        self.assertTrue(len(model_list) > 0)
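
The new is_imported helper replaces mmengine.utils.is_installed in the skip conditions. The practical difference (a hedged sketch, not from the diff) is that it checks whether the package can actually be imported rather than whether distribution metadata is installed, so the CI's PYTHONPATH-only mmdetection checkout still satisfies the skipif guards:

import importlib.util


def importable(package: str) -> bool:
    # A lighter-weight variant of the diff's `is_imported`: find_spec consults
    # the import machinery (including PYTHONPATH entries) without running the
    # package's top-level code.
    return importlib.util.find_spec(package) is not None


if __name__ == '__main__':
    # True whenever `import mmdet` would succeed, even with no pip metadata.
    print(importable('mmdet'))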
