add lr_scheduler tests 2
senwu committed Dec 2, 2019
1 parent 69b7780 commit b23e701
Showing 8 changed files with 115 additions and 27 deletions.
1 change: 0 additions & 1 deletion src/emmental/learner.py
@@ -224,7 +224,6 @@ def _update_lr_scheduler(self, model: EmmentalModel, step: int) -> None:
                 self.lr_scheduler.step()  # type: ignore
             elif (
                 opt in ["step", "multi_step"]
-                and step > 0
                 and (step + 1) % self.n_batches_per_epoch == 0
             ):
                 self.lr_scheduler.step()  # type: ignore
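The effect of dropping `and step > 0` above: epoch-based schedulers ("step" and "multi_step") now step at the end of every epoch, including the first, since the `(step + 1) % n_batches_per_epoch` check alone already fires only at epoch boundaries. A minimal sketch of the resulting stepping rule (a standalone illustration, not Emmental's exact code path):

def should_step_epoch_scheduler(opt: str, step: int, n_batches_per_epoch: int) -> bool:
    # `step` is the 0-indexed batch counter; an epoch boundary is reached
    # whenever (step + 1) is a multiple of n_batches_per_epoch.
    return opt in ["step", "multi_step"] and (step + 1) % n_batches_per_epoch == 0

# With n_batches_per_epoch = 1, as in the tests below, the scheduler now steps
# once per epoch starting from step 0; the old `step > 0` guard skipped that
# first boundary.
assert should_step_epoch_scheduler("step", 0, 1)
assert not should_step_epoch_scheduler("multi_step", 0, 2)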
9 changes: 4 additions & 5 deletions tests/lr_schedulers/test_cosine_annealing_scheduler.py
@@ -23,7 +23,6 @@ def test_cosine_annealing_scheduler(caplog):
     Meta.reset()
     emmental.init(dirpath)
 
-    # Test default Adam setting
     config = {
         "learner_config": {
             "n_epochs": 4,
@@ -38,21 +37,21 @@ def test_cosine_annealing_scheduler(caplog):
 
     assert emmental_learner.optimizer.param_groups[0]["lr"] == 10
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 0)
     assert (
         abs(emmental_learner.optimizer.param_groups[0]["lr"] - 8.535533905932738) < 1e-5
     )
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 1)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 2)
     assert (
         abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1.4644660940672627)
         < 1e-5
     )
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 3)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"]) < 1e-5
 
     shutil.rmtree(dirpath)
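The expected values in this test are consistent with the standard cosine annealing rule lr_t = eta_min + (lr_0 - eta_min) * (1 + cos(pi * t / T_max)) / 2, assuming lr_0 = 10, eta_min = 0, and T_max = n_epochs = 4; a quick sanity check, not part of the test suite:

import math

lr0, t_max = 10, 4
expected = [lr0 * (1 + math.cos(math.pi * (t + 1) / t_max)) / 2 for t in range(4)]
# ~[8.535533905932738, 5.0, 1.4644660940672627, 0.0], matching the asserted
# values above within the 1e-5 tolerance.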
9 changes: 4 additions & 5 deletions tests/lr_schedulers/test_exponential_scheduler.py
@@ -23,7 +23,6 @@ def test_exponential_scheduler(caplog):
     Meta.reset()
     emmental.init(dirpath)
 
-    # Test default Adam setting
     config = {
         "learner_config": {
             "n_epochs": 4,
@@ -41,16 +40,16 @@ def test_exponential_scheduler(caplog):
 
     assert emmental_learner.optimizer.param_groups[0]["lr"] == 10
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 0)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 1)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 2)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.01) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 3)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.001) < 1e-5
 
     shutil.rmtree(dirpath)
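The decay pattern asserted above (10 -> 1 -> 0.1 -> 0.01 -> 0.001) is consistent with an exponential schedule lr_t = lr_0 * gamma ** (t + 1); the gamma value sits in the collapsed part of the config, so gamma = 0.1 below is an assumption for illustration, not read from the diff:

lr0, gamma = 10, 0.1  # gamma assumed; the relevant config lines are collapsed in this diff
expected = [lr0 * gamma ** (t + 1) for t in range(4)]
# ~[1.0, 0.1, 0.01, 0.001], matching the asserted values within the 1e-5 tolerance.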
9 changes: 4 additions & 5 deletions tests/lr_schedulers/test_linear_scheduler.py
@@ -23,7 +23,6 @@ def test_linear_scheduler(caplog):
     Meta.reset()
     emmental.init(dirpath)
 
-    # Test default Adam setting
     config = {
         "learner_config": {
             "n_epochs": 4,
@@ -38,16 +37,16 @@ def test_linear_scheduler(caplog):
 
     assert emmental_learner.optimizer.param_groups[0]["lr"] == 10
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 0)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 7.5) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 1)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 2)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 2.5) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 3)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"]) < 1e-5
 
     shutil.rmtree(dirpath)
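The asserted values here follow a linear decay from the base LR to 0 over n_epochs * n_batches_per_epoch = 4 total steps, i.e. lr_t = lr_0 * (1 - (t + 1) / T); a sanity check under that assumption:

lr0, total_steps = 10, 4
expected = [lr0 * (1 - (t + 1) / total_steps) for t in range(4)]
# [7.5, 5.0, 2.5, 0.0], matching the asserts above.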
9 changes: 4 additions & 5 deletions tests/lr_schedulers/test_multi_step_scheduler.py
@@ -23,7 +23,6 @@ def test_multi_step_scheduler(caplog):
     Meta.reset()
     emmental.init(dirpath)
 
-    # Test default Adam setting
     config = {
         "learner_config": {
             "n_epochs": 4,
@@ -45,16 +44,16 @@ def test_multi_step_scheduler(caplog):
 
     assert emmental_learner.optimizer.param_groups[0]["lr"] == 10
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 0)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 1)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 2)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 3)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5
 
     shutil.rmtree(dirpath)
9 changes: 4 additions & 5 deletions tests/lr_schedulers/test_step_scheduler.py
@@ -23,7 +23,6 @@ def test_step_scheduler(caplog):
     Meta.reset()
     emmental.init(dirpath)
 
-    # Test default Adam setting
     config = {
         "learner_config": {
             "n_epochs": 4,
@@ -41,16 +40,16 @@ def test_step_scheduler(caplog):
 
     assert emmental_learner.optimizer.param_groups[0]["lr"] == 10
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 0)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 1)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 2)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 1) < 1e-5
 
-    emmental_learner.lr_scheduler.step()
+    emmental_learner._update_lr_scheduler(model, 3)
     assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 0.1) < 1e-5
 
     shutil.rmtree(dirpath)
89 changes: 89 additions & 0 deletions tests/lr_schedulers/test_warmup_scheduler.py
@@ -0,0 +1,89 @@
import logging
import shutil

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

logger = logging.getLogger(__name__)


def test_warmup_scheduler(caplog):
    """Unit test of warmup scheduler"""

    caplog.set_level(logging.INFO)

    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test warmup steps
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": None,
                "warmup_steps": 2,
                "warmup_unit": "epoch",
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 0

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    Meta.reset()
    emmental.init(dirpath)

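    # Test warmup percentage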
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": None,
                "warmup_percentage": 0.5,
                "warmup_unit": "epoch",
            },
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)

    assert emmental_learner.optimizer.param_groups[0]["lr"] == 0

    emmental_learner._update_lr_scheduler(model, 0)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 5) < 1e-5

    emmental_learner._update_lr_scheduler(model, 1)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 2)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    emmental_learner._update_lr_scheduler(model, 3)
    assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-5

    shutil.rmtree(dirpath)
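Both configurations warm up over half of the 4 epochs (warmup_steps=2 with n_batches_per_epoch=1 is equivalent to warmup_percentage=0.5), so the LR ramps linearly from 0 to the base LR of 10 and then holds. A small sketch of the expected ramp being asserted (an illustration of the schedule, not Emmental's implementation):

def warmup_lr(base_lr: float, step: int, warmup_steps: int) -> float:
    # Linear warmup: ramp toward base_lr over warmup_steps updates, then hold.
    if warmup_steps > 0 and step + 1 < warmup_steps:
        return base_lr * (step + 1) / warmup_steps
    return base_lr

# Steps 0..3 with base_lr=10 and warmup_steps=2 -> [5.0, 10.0, 10.0, 10.0],
# matching the asserts in both halves of the test above.
assert [warmup_lr(10.0, s, 2) for s in range(4)] == [5.0, 10.0, 10.0, 10.0]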
7 changes: 6 additions & 1 deletion tests/test_e2e.py
@@ -217,7 +217,12 @@ def output(task_name, immediate_ouput_dict):
 
     # Update learning config
     Meta.update_config(
-        config={"learner_config": {"n_epochs": 10, "optimizer_config": {"lr": 0.01}}}
+        config={
+            "learner_config": {
+                "n_epochs": 10,
+                "optimizer_config": {"lr": 0.01, "grad_clip": 100},
+            }
+        }
     )
 
     # Learning
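The functional change in this file is the new "grad_clip": 100 entry in the optimizer config. Gradient clipping of this kind is conventionally applied to the model's gradients just before each optimizer step; a hedged sketch using PyTorch's standard utility, not necessarily Emmental's exact call site or parameter handling:

import torch


def clip_and_step(model: torch.nn.Module, optimizer: torch.optim.Optimizer, grad_clip: float) -> None:
    # Rescale gradients so their global L2 norm is at most grad_clip, then step.
    if grad_clip:
        torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip)
    optimizer.step()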
