This repository was archived by the owner on Jul 1, 2024. It is now read-only.
.circleci/config.yml (12 changes: 6 additions & 6 deletions)

@@ -126,10 +126,10 @@ jobs:
             # fallback to using the latest cache if no exact match is found
             - v7-cpu-dependencies-

-      - <<: *install_dep
-
       - <<: *install_dev_dep
+
+      - <<: *install_dep

       - <<: *pip_list

       - save_cache:
@@ -177,10 +177,10 @@ jobs:
             # fallback to using the latest cache if no exact match is found
             - v4-gpu-dependencies-

-      - <<: *install_dep
-
       - <<: *install_dev_dep
+
+      - <<: *install_dep

       - <<: *pip_list

       - <<: *check_cuda_available
@@ -214,10 +214,10 @@ jobs:
             # fallback to using the latest cache if no exact match is found
             - v2-gpu-bc-dependencies-

-      - <<: *install_dep_bc
-
       - <<: *install_dev_dep
+
+      - <<: *install_dep_bc

       - <<: *pip_list

       - <<: *check_cuda_available
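Note: the "<<: *install_dep" entries are YAML merge keys that splice in step definitions anchored elsewhere in config.yml; the change itself just reorders each job so install_dev_dep runs before install_dep. A minimal sketch of the anchor/merge-key mechanics, using PyYAML with a hypothetical anchor body (the real step definitions live earlier in config.yml):

    # Sketch: how "<<: *anchor" merge keys expand. The install_dep body
    # here is hypothetical; the real definitions live in config.yml.
    import yaml

    doc = """
    install_dep: &install_dep
      run:
        name: Install dependencies
        command: pip install -r requirements.txt

    steps:
      - <<: *install_dep
    """

    steps = yaml.safe_load(doc)["steps"]
    # The merge key splices the anchored mapping into the step:
    assert steps[0] == {"run": {"name": "Install dependencies",
                                "command": "pip install -r requirements.txt"}}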
classy_vision/models/regnet.py (12 changes: 5 additions & 7 deletions)

@@ -447,17 +447,15 @@ class RegNet(ClassyModel):
     def __init__(self, params: RegNetParams):
         super().__init__()

-        if params.activation_type == ActivationType.SILU and get_torch_version() < [
-            1,
-            7,
-        ]:
-            raise RuntimeError("SiLU activation is only supported since PyTorch 1.7")
-
+        silu = None if get_torch_version() < [1, 7] else nn.SiLU()
         activation = {
             ActivationType.RELU: nn.ReLU(params.relu_in_place),
-            ActivationType.SILU: nn.SiLU(),
+            ActivationType.SILU: silu,
         }[params.activation_type]

+        if activation is None:
+            raise RuntimeError("SiLU activation is only supported since PyTorch 1.7")
+
         # Ad hoc stem
         self.stem = {
             StemType.RES_STEM_CIFAR: ResStemCifar,
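Note on the change above: the old guard raised only when SILU was requested, but the dict literal still evaluated nn.SiLU() unconditionally, so on PyTorch < 1.7 (where nn.SiLU does not exist) even RELU models crashed with an AttributeError. The rewrite constructs nn.SiLU() only when the version allows it and raises the RuntimeError only if the None entry is actually selected. A runnable sketch of the same pattern, with a hypothetical stand-in for the repo's get_torch_version() helper:

    # Sketch of the version-gated activation lookup. get_torch_version() is
    # a hypothetical stand-in for the helper imported by regnet.py; it
    # reduces torch.__version__ (e.g. "1.7.0+cu101") to [major, minor].
    import torch
    import torch.nn as nn

    def get_torch_version():
        return [int(part) for part in torch.__version__.split(".")[:2]]

    # nn.SiLU only exists in PyTorch >= 1.7, so guard the constructor itself.
    silu = None if get_torch_version() < [1, 7] else nn.SiLU()

    activation = {
        "relu": nn.ReLU(inplace=True),
        "silu": silu,
    }["silu"]

    if activation is None:
        raise RuntimeError("SiLU activation is only supported since PyTorch 1.7")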
test/optim_sharded_sgd_test.py (20 changes: 12 additions & 8 deletions)

@@ -25,25 +25,29 @@
 from classy_vision.optim.zero import ZeRO


-def dist_init(rank, world_size):
-    os.environ["MASTER_ADDR"] = "localhost"
-    os.environ["MASTER_PORT"] = "29500"
-    dist.init_process_group(backend=dist.Backend.GLOO, rank=rank, world_size=world_size)
+def dist_init(rank, world_size, filename):
+    dist.init_process_group(
+        init_method="file://" + filename,
+        backend=dist.Backend.GLOO,
+        rank=rank,
+        world_size=world_size,
+    )


 class TestOptimizerStateShardingIntegration(unittest.TestCase, TestOptimizer):
     @staticmethod
-    def _maybe_destro_dist():
+    def _maybe_destroy_dist():
         if dist.is_initialized():
             logging.debug("Destroy previous torch dist process group")
             dist.destroy_process_group()

     def setUp(self):
-        self._maybe_destro_dist()
-        dist_init(0, 1)
+        self._maybe_destroy_dist()
+        self.filename = tempfile.NamedTemporaryFile(delete=True).name
+        dist_init(0, 1, self.filename)

     def tearDown(self):
-        self._maybe_destro_dist()
+        self._maybe_destroy_dist()

     def _get_config(self):
         return {"name": "zero", "base_optimizer": {"name": "sgd"}, "num_epochs": 3}
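Note: the old rendezvous hard-coded MASTER_ADDR/MASTER_PORT, so the fixed TCP port 29500 had to be free; parallel test runs or a lingering socket from a previous run could make setUp fail. File-based initialization rendezvouses through a unique temporary path instead. A self-contained sketch of the new flow, mirroring dist_init above:

    # Sketch of file-based process-group rendezvous (gloo backend).
    import tempfile

    import torch.distributed as dist

    # NamedTemporaryFile(delete=True).name only reserves a unique path: the
    # file is removed as soon as the handle is garbage-collected, leaving a
    # fresh name that init_process_group recreates for its file store.
    filename = tempfile.NamedTemporaryFile(delete=True).name

    dist.init_process_group(
        init_method="file://" + filename,
        backend=dist.Backend.GLOO,
        rank=0,
        world_size=1,
    )
    assert dist.is_initialized()
    dist.destroy_process_group()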