This repository has been archived by the owner on Jun 24, 2021. It is now read-only.

Compatible with Keras 2.3.0
CyberZHG committed Sep 18, 2019
1 parent 3dbdc01 commit de55316
Showing 7 changed files with 42 additions and 23 deletions.
.travis.yml (16 changes: 9 additions & 7 deletions)
```diff
@@ -1,13 +1,15 @@
 dist: xenial
 language: python
-python:
-- 3.6
+python: "3.6"
 env:
-- KERAS_BACKEND=tensorflow TF_KERAS=1 TF_2=1
-- KERAS_BACKEND=tensorflow TF_KERAS=1 TF_EAGER=1
-- KERAS_BACKEND=tensorflow TF_KERAS=1
-- KERAS_BACKEND=tensorflow
-- KERAS_BACKEND=theano THEANO_FLAGS=optimizer=fast_compile
+  global:
+  - COVERALLS_PARALLEL=true
+  matrix:
+  - KERAS_BACKEND=tensorflow TF_KERAS=1 TF_2=1
+  - KERAS_BACKEND=tensorflow TF_KERAS=1 TF_EAGER=1
+  - KERAS_BACKEND=tensorflow TF_KERAS=1
+  - KERAS_BACKEND=tensorflow
+  - KERAS_BACKEND=theano THEANO_FLAGS=optimizer=fast_compile
 install:
 - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
 - bash miniconda.sh -b -p $HOME/miniconda
```
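The build matrix exercises standalone Keras, `tf.keras` (graph, eager, and TF 2.x) and Theano, selected through environment variables. A sketch of the import switch a `TF_KERAS` flag like this typically drives (an assumption about the repo's internals; the diff itself does not show this code):

```python
import os

# Assumed pattern: pick the Keras implementation based on the TF_KERAS flag
# set in the Travis build matrix above.
if os.environ.get('TF_KERAS', '0') != '0':
    from tensorflow.python import keras
    from tensorflow.python.keras import backend as K
else:
    import keras
    from keras import backend as K
```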
README.md (4 changes: 4 additions & 0 deletions)
````diff
@@ -22,6 +22,10 @@ Unofficial implementation of the [lookahead mechanism](https://arxiv.org/pdf/190
 pip install keras-lookahead
 ```
 
+## External Links
+
+- [tensorflow/addons:LookAhead](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/optimizers/lookahead.py)
+
 ## Usage
 
 Arguments:
````
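The hunk context shows the install command and the start of the usage section. A minimal usage sketch consistent with the README's documented interface (parameter values are illustrative defaults, not taken from this diff):

```python
from keras_lookahead import Lookahead

# Wrap any Keras optimizer (or its string identifier); `sync_period` is the
# lookahead k and `slow_step` the alpha from the lookahead paper.
optimizer = Lookahead('adam', sync_period=5, slow_step=0.5)
```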
README.zh-CN.md (4 changes: 4 additions & 0 deletions)
````diff
@@ -22,6 +22,10 @@
 pip install keras-lookahead
 ```
 
+## 外部链接
+
+- [tensorflow/addons:LookAhead](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/optimizers/lookahead.py)
+
 ## 使用
 
 参数:
````
keras_lookahead/optimizers.py (10 changes: 9 additions & 1 deletion)
```diff
@@ -34,12 +34,20 @@ def lr(self):
     def lr(self, lr):
         self.optimizer.lr = lr
 
+    @property
+    def learning_rate(self):
+        return self.optimizer.learning_rate
+
+    @learning_rate.setter
+    def learning_rate(self, learning_rate):
+        self.optimizer.learning_rate = learning_rate
+
     @property
     def iterations(self):
         return self.optimizer.iterations
 
     def get_updates(self, loss, params):
-        sync_cond = K.equal((self.iterations + 1) % self.sync_period, 0)
+        sync_cond = K.equal((self.iterations + 1) // self.sync_period * self.sync_period, (self.iterations + 1))
         if TF_KERAS:
             slow_params = [K.variable(K.get_value(p), name='sp_{}'.format(i)) for i, p in enumerate(params)]
             self.updates = self.optimizer.get_updates(loss, params)
```
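Two things change here: the wrapper now forwards `learning_rate`, the attribute name Keras 2.3.0 made canonical (the older `lr` property stays for compatibility), and the synchronization condition is rewritten in an arithmetically equivalent form, presumably to avoid a modulo operation that one of the tested backends handles poorly on symbolic integers. A quick standalone check of the equivalence (a sketch, not part of the commit):

```python
# For positive integers n and p:  n % p == 0  <=>  (n // p) * p == n,
# so the rewritten sync_cond fires on exactly the same iterations.
import numpy as np

p = 5                                  # sync_period
n = np.arange(1, 101, dtype=np.int64)  # iterations + 1
assert np.array_equal((n % p) == 0, (n // p) * p == n)
```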
setup.py (2 changes: 1 addition & 1 deletion)
```diff
@@ -11,7 +11,7 @@
 
 setup(
     name='keras-lookahead',
-    version='0.5.0',
+    version='0.6.0',
     packages=find_packages(),
     url='https://github.com/CyberZHG/keras-lookahead',
     license='MIT',
```
tests/optimizers.py (4 changes: 2 additions & 2 deletions)
```diff
@@ -8,7 +8,7 @@ def __init__(self, lr=0.001, beta_1=0.9, beta_2=0.999, **kwargs):
         super(Adam, self).__init__(**kwargs)
         with K.name_scope(self.__class__.__name__):
             self.iterations = K.variable(0, dtype='int64', name='iterations')
-            self.lr = K.variable(lr, name='lr')
+            self.learning_rate = K.variable(lr, name='lr')
             self.beta_1 = K.variable(beta_1, name='beta_1')
             self.beta_2 = K.variable(beta_2, name='beta_2')
             self.epsilon = K.epsilon()
@@ -18,7 +18,7 @@ def get_updates(self, loss, params):
         self.updates = [K.update_add(self.iterations, 1)]
 
         t = K.cast(self.iterations, K.floatx()) + 1
-        lr_t = self.lr * (K.sqrt(1. - K.pow(self.beta_2, t)) / (1. - K.pow(self.beta_1, t)))
+        lr_t = self.learning_rate * (K.sqrt(1. - K.pow(self.beta_2, t)) / (1. - K.pow(self.beta_1, t)))
 
         ms = [K.zeros(K.int_shape(p), dtype=K.dtype(p)) for p in params]
         vs = [K.zeros(K.int_shape(p), dtype=K.dtype(p)) for p in params]
```
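The test's standalone Adam now stores `learning_rate` instead of `lr`, matching Keras 2.3.0, where `learning_rate` became the canonical attribute and `lr` a legacy alias. The `lr_t` line is standard Adam bias correction; a standalone sketch of that arithmetic with illustrative values (not taken from the tests):

```python
import math

learning_rate, beta_1, beta_2 = 0.001, 0.9, 0.999
for t in (1, 10, 1000):
    # Rescales the step to compensate for the zero-initialized moment
    # estimates; approaches the plain learning_rate as t grows.
    lr_t = learning_rate * math.sqrt(1.0 - beta_2 ** t) / (1.0 - beta_1 ** t)
    print(t, lr_t)
```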
tests/test_optimizers.py (25 changes: 13 additions & 12 deletions)
```diff
@@ -61,22 +61,23 @@ def test_ranger(self):
         self.assertLess(np.max(np.abs(predicted - y)), 1e-3)
 
     def test_half(self):
-        weight = np.random.standard_normal((5, 1))
-        x, y, _ = self._init_data(data_size=3200)
+        if TF_KERAS:
+            weight = np.random.standard_normal((5, 1))
+            x, y, _ = self._init_data(data_size=3200)
 
-        model = self._init_model('adam', w=weight)
-        model.fit(x, y, batch_size=32)
-        original = model.get_weights()[0]
+            model = self._init_model('adam', w=weight)
+            model.fit(x, y, batch_size=32)
+            original = model.get_weights()[0]
 
-        model = self._init_model(Lookahead('adam', sync_period=100, slow_step=0.5), w=weight)
-        model.fit(x, y, batch_size=32)
-        step_back = model.get_weights()[0]
+            model = self._init_model(Lookahead('adam', sync_period=100, slow_step=0.5), w=weight)
+            model.fit(x, y, batch_size=32)
+            step_back = model.get_weights()[0]
 
-        half_step = (weight + original) * 0.5
-        self.assertTrue(np.allclose(half_step, step_back, atol=1e-2))
+            half_step = (weight + original) * 0.5
+            self.assertTrue(np.allclose(half_step, step_back, atol=1e-2), (weight, original, step_back, half_step))
 
     def test_lr(self):
-        opt = Lookahead('adam')
+        opt = Lookahead(RAdam())
         K.set_value(opt.lr, 1e-4)
         self.assertAlmostEqual(1e-4, K.get_value(opt.lr))
         self.assertAlmostEqual(1e-4, K.get_value(opt.optimizer.lr))
@@ -118,4 +118,4 @@ def test_consistent(self):
         model.fit(x, y, batch_size=32, shuffle=False)
         loaded = model.get_weights()[0]
 
-        self.assertTrue(np.allclose(original, loaded, atol=1e-6))
+        self.assertTrue(np.allclose(original, loaded, atol=1e-4), (original, loaded))
```
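`test_half` now runs only under `tf.keras`, the failing assertions report the tensors involved, and `test_lr` exercises the wrapper around `RAdam` rather than plain Adam, i.e. the "Ranger" pairing. A minimal sketch of that combination (assumes the `keras-radam` package, which the tests appear to import `RAdam` from; the model and data are illustrative):

```python
import numpy as np
import keras
from keras_radam import RAdam          # assumed dependency providing RAdam
from keras_lookahead import Lookahead

# Tiny illustrative model; the point is wrapping RAdam with Lookahead.
model = keras.models.Sequential([keras.layers.Dense(1, input_shape=(5,))])
model.compile(optimizer=Lookahead(RAdam(), sync_period=5, slow_step=0.5), loss='mse')
model.fit(np.random.rand(32, 5), np.random.rand(32, 1), epochs=1, verbose=0)
```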
