Showing 12 changed files with 429 additions and 17 deletions.
Empty file.
@@ -0,0 +1,57 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_cosine_annealing_scheduler(caplog):
    """Unit test of cosine annealing scheduler"""

    caplog.set_level(logging.INFO)

    lr_scheduler = "cosine_annealing"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {"lr_scheduler": lr_scheduler},
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 10

    emmental_learner._update_lr_scheduler(model, 0)
    assert (
        abs(emmental_learner.optimizer.param_groups[0]["lr"] - 8.535533905932738) < 1e-5
    )

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert (
        abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1.4644660940672627)
        < 1e-5
    )

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"]) < 1e-5

    shutil.rmtree(dirpath)
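The asserted values correspond to a standard cosine annealing schedule decaying from the base learning rate to 0 over n_epochs * n_batches_per_epoch = 4 steps (presumably backed by torch.optim.lr_scheduler.CosineAnnealingLR, though the test only exercises Emmental's wrapper). A minimal sketch that reproduces the expected numbers:

import math

lr_0, total_steps = 10, 4  # base lr and step count from the test config above
for t in range(1, total_steps + 1):
    # Cosine annealing with eta_min = 0: lr_t = lr_0 * (1 + cos(pi * t / T)) / 2
    print(t, lr_0 * (1 + math.cos(math.pi * t / total_steps)) / 2)
# -> 8.535533905932738, 5.0, 1.4644660940672627, 0.0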
@@ -0,0 +1,55 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_exponential_scheduler(caplog):
    """Unit test of exponential scheduler"""

    caplog.set_level(logging.INFO)

    lr_scheduler = "exponential"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "exponential_config": {"gamma": 0.1},
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 10

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.01) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.001) < 1e-5

    shutil.rmtree(dirpath)
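Here the expected values are plain geometric decay by gamma at every step, matching ExponentialLR semantics: lr_t = lr_0 * gamma ** t. A quick check of the asserted numbers:

lr_0, gamma = 10, 0.1  # from the test config above
for t in range(1, 5):
    # exponential decay: lr_t = lr_0 * gamma ** t
    print(t, lr_0 * gamma ** t)
# -> 1.0, 0.1, 0.01, 0.001 (up to float rounding, hence the 1e-5 tolerance)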
@@ -0,0 +1,52 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_linear_scheduler(caplog):
    """Unit test of linear scheduler"""

    caplog.set_level(logging.INFO)

    lr_scheduler = "linear"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {"lr_scheduler": lr_scheduler},
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 10

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 7.5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 2.5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"]) < 1e-5

    shutil.rmtree(dirpath)
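The linear scheduler decays the rate to 0 in equal decrements over the run: lr_t = lr_0 * (1 - t / T), with T = n_epochs * n_batches_per_epoch = 4 here. A sketch reproducing the asserted 7.5, 5, 2.5, 0:

lr_0, total_steps = 10, 4  # from the test config above
for t in range(1, total_steps + 1):
    # linear decay to zero: lr_t = lr_0 * (1 - t / T)
    print(t, lr_0 * (1 - t / total_steps))
# -> 7.5, 5.0, 2.5, 0.0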
@@ -0,0 +1,59 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_multi_step_scheduler(caplog):
    """Unit test of multi step scheduler"""

    caplog.set_level(logging.INFO)

    lr_scheduler = "multi_step"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "multi_step_config": {
                    "milestones": [1, 3],
                    "gamma": 0.1,
                    "last_epoch": -1,
                },
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 10

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5

    shutil.rmtree(dirpath)
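With MultiStepLR-style semantics, the rate is multiplied by gamma each time the internal step counter crosses a milestone and stays flat in between. Counting the step after each _update_lr_scheduler call as 1 through 4 reproduces the asserted 1, 1, 0.1, 0.1:

lr, gamma, milestones = 10, 0.1, {1, 3}  # from the test config above
for t in range(1, 5):  # step counter after each update call
    if t in milestones:
        lr *= gamma  # drop by gamma at each milestone
    print(t, lr)
# -> 1.0, 1.0, 0.1, 0.1 (up to float rounding)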
@@ -0,0 +1,55 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_step_scheduler(caplog):
    """Unit test of step scheduler"""

    caplog.set_level(logging.INFO)

    lr_scheduler = "step"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "step_config": {"step_size": 2, "gamma": 0.1, "last_epoch": -1},
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 10

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5

    shutil.rmtree(dirpath)
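StepLR semantics give a staircase: the rate drops by gamma once every step_size steps, i.e. lr_t = lr_0 * gamma ** (t // step_size). With step_size = 2 this yields the asserted 10, 1, 1, 0.1:

lr_0, gamma, step_size = 10, 0.1, 2  # from the test config above
for t in range(1, 5):
    # staircase decay: lr_t = lr_0 * gamma ** (t // step_size)
    print(t, lr_0 * gamma ** (t // step_size))
# -> 10.0, 1.0, 1.0, 0.1 (up to float rounding)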
@@ -0,0 +1,90 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_warmup_scheduler(caplog):
    """Unit test of warmup scheduler"""

    caplog.set_level(logging.INFO)

    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test warmup steps
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": None,
                "warmup_steps": 2,
                "warmup_unit": "batch",
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 0

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    Meta.reset()
    emmental.init(dirpath)

    # Test warmup percentage
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": None,
                "warmup_percentage": 0.5,
                "warmup_unit": "epoch",
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 0

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    shutil.rmtree(dirpath)
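Both halves of this test assert the same trajectory because the warmup window works out to 2 steps in each case: warmup_steps = 2 batches directly, and warmup_percentage = 0.5 of 4 epochs at 1 batch per epoch. Assuming Emmental ramps linearly from 0 to the base learning rate over the warmup window (an assumption, but consistent with the asserted 0, 5, 10), a sketch:

lr_0, warmup_steps = 10, 2  # 2 batches, or 0.5 * 4 epochs * 1 batch/epoch
for t in range(5):  # step counter at each assertion point, before/after updates
    # assumed linear warmup: scale the base lr by min(t / warmup_steps, 1)
    print(t, lr_0 * min(t / warmup_steps, 1.0))
# -> 0.0, 5.0, 10.0, 10.0, 10.0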