Commit

Add optimiser base class

lewis-od committed Sep 21, 2017
1 parent ba5e4f4 commit 106821a
Showing 7 changed files with 89 additions and 37 deletions.
14 changes: 10 additions & 4 deletions demo.py
@@ -1,10 +1,16 @@
-from optimatic.grad_desc import Optimiser
+from optimatic.optimisers.grad_desc import Optimiser
 import numpy as np
 
+minimum = np.random.normal(scale=5)
+print("Actual minimum is: {}".format(minimum))
+
 def f(x):
-    return (x - 5.4) ** 2
+    return (x - minimum) ** 2
 
 def df(x):
-    return 2 * (x - 5.4)
+    return 2 * (x - minimum)
 
-opt = Optimiser(f, df, 0.0)
+opt = Optimiser(f, df, np.random.normal(scale=5))
+x = opt.optimise()
+
+print("Calculated minimum is: {}".format(x))
2 changes: 1 addition & 1 deletion docs/source/introduction.rst
@@ -25,4 +25,4 @@ Then import and initialise the optimiser you want to use, e.g.:
 Then run either :code:`opt.step()` to run one step of the chosen optimisation
 algorithm, or :code:`opt.optimise()` to run until either :code:`opt.steps` is
 exceeded, or :code:`opt.precision` is met. See
-:func:`~optimatic.grad_desc.Optimiser` for more details.
+:func:`~optimatic.optimisers.optimiser.Optimiser` for more details.
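
As the docs describe, an optimiser can be driven one iteration at a time with :code:`opt.step()` or run to convergence with :code:`opt.optimise()`. A minimal usage sketch of both patterns, assuming the optimatic.optimisers.grad_desc layout introduced in this commit; the quadratic target and variable names below are illustrative only, not from the repository:

# Usage sketch; f, df and the minimum at 2.0 are illustrative.
from optimatic.optimisers.grad_desc import Optimiser

def f(x):
    return (x - 2.0) ** 2

def df(x):
    return 2 * (x - 2.0)

opt = Optimiser(f, df, 0.0)
opt.step()              # run a single iteration
x_min = opt.optimise()  # continue until opt.precision is met or opt.steps is exceeded
print(x_min)            # approximately 2.0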
30 changes: 30 additions & 0 deletions docs/source/optimatic.optimisers.rst
@@ -0,0 +1,30 @@
optimatic.optimisers package
============================

Submodules
----------

optimatic.optimisers.grad_desc module
-------------------------------------

.. automodule:: optimatic.optimisers.grad_desc
    :members:
    :undoc-members:
    :show-inheritance:

optimatic.optimisers.optimiser module
-------------------------------------

.. automodule:: optimatic.optimisers.optimiser
    :members:
    :undoc-members:
    :show-inheritance:


Module contents
---------------

.. automodule:: optimatic.optimisers
    :members:
    :undoc-members:
    :show-inheritance:
13 changes: 1 addition & 12 deletions docs/source/optimatic.rst
@@ -6,20 +6,9 @@ Subpackages

 .. toctree::
 
+    optimatic.optimisers
     optimatic.utils
 
-Submodules
-----------
-
-optimatic.grad_desc module
---------------------------
-
-.. automodule:: optimatic.grad_desc
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-
 Module contents
 ---------------
 
Empty file.
28 changes: 8 additions & 20 deletions optimatic/grad_desc.py → optimatic/optimisers/grad_desc.py
@@ -11,43 +11,31 @@
     \mathbf{x}_n = \mathbf{x}_{n-1} - \gamma \\nabla f(\mathbf{x}_{n-1})
 """
 import numpy as np
+from optimiser import Optimiser as OptimiserBase
 
-class Optimiser(object):
+class Optimiser(OptimiserBase):
     """
     :param y: The function to optimise
     :param dy: The derivative of the function to optimise
     :param x0: The starting position for the algorithm
     :param precision: The precision to calculate the minimum to
-    :param gamma: The starting value for gamma
+    :param gamma: The starting value for :math:`\gamma`
     :param steps: The max number of iterations of the algorithm to run
     """
-    def __init__(self, y, dy, x0, precision=0.0001, gamma=0.1, steps=1000):
-        self.y = y
+    def __init__(self, y, dy, x0, precision=0.0001, gamma=0.1, steps=10000):
+        super(Optimiser, self).__init__(y, x0, precision=precision, steps=steps)
         self.dy = dy
-        self.precision = precision
-        self.step_size = x0
-        self.xn = x0
-        self.xn_1 = x0
         self.gamma = gamma
-        self.steps = steps
 
     def step(self):
         """Runs one iteration of the algorithm"""
         self.xn_1 = self.xn
         self.xn = self.xn_1 - self.gamma * self.dy(self.xn_1)
 
         grad_diff = self.dy(self.xn) - self.dy(self.xn_1)
         if grad_diff == 0.0:
             # Algorithm has converged
             return
         xs_diff = self.xn - self.xn_1
         self.gamma = np.dot(xs_diff, grad_diff)
         self.gamma /= np.linalg.norm(grad_diff) ** 2
-
-    def optimise(self):
-        """Runs :func:`step` the specified number of times"""
-        i = 0
-        self.step()
-        step_size = np.linalg.norm(self.xn - self.xn_1)
-        while step_size < self.precision and i < self.steps:
-            self.step()
-            step_size = np.linalg.norm(self.xn - self.xn_1)
-            i += 1
-        return self.xn
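
The unchanged body of step() above combines a plain gradient-descent update with an adaptive step size resembling the Barzilai-Borwein rule: gamma is recomputed as (Δx · Δg) / |Δg|² after each iteration, where Δx and Δg are the changes in position and gradient. A standalone sketch of that update, independent of the package; bb_step, dy and the target function are illustrative only:

import numpy as np

def bb_step(x, dy, gamma):
    # One gradient-descent update followed by the adaptive step-size rule
    # used in step() above; the function and variable names are illustrative.
    x_new = x - gamma * dy(x)
    grad_diff = dy(x_new) - dy(x)
    if np.all(grad_diff == 0.0):
        return x_new, gamma  # gradient unchanged: converged, keep old step size
    xs_diff = x_new - x
    gamma_new = np.dot(xs_diff, grad_diff) / np.linalg.norm(grad_diff) ** 2
    return x_new, gamma_new

dy = lambda x: 2 * (x - 3.0)  # derivative of (x - 3)^2
x, gamma = 0.0, 0.1
for _ in range(50):
    x, gamma = bb_step(x, dy, gamma)
print(x)  # approximately 3.0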
39 changes: 39 additions & 0 deletions optimatic/optimisers/optimiser.py
@@ -0,0 +1,39 @@
"""
Optimiser base class
All optimiser classes should inherit from this class
"""
from abc import ABCMeta, abstractmethod
import numpy as np

class Optimiser(object):
"""
:param y: The function to optimise
:param x0: The starting position for the algorithm
:param precision: The precision to calculate the minimum to
:param steps: The max number of iterations of the algorithm to run
"""
__metaclass__ = ABCMeta

def __init__(self, y, x0, precision=1e-4, steps=10000):
self.y = y
self.xn = x0
self.xn_1 = x0
self.precision = precision
self.steps = steps

@abstractmethod
def step(self):
"""Runs one iteration of the algorithm"""
return

def optimise(self):
"""Runs :func:`step` the specified number of times"""
i = 0
self.step()
step_size = np.linalg.norm(self.xn - self.xn_1)
while step_size > self.precision and i < self.steps:
self.step()
step_size = np.linalg.norm(self.xn - self.xn_1)
i += 1
return self.xn
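
The base class fixes the contract for new optimisers: subclasses implement step(), and the inherited optimise() supplies the shared convergence loop, stopping once the step size drops below precision or steps iterations have run. An illustrative sketch of such a subclass; FixedStepGD and its lr parameter are hypothetical, not part of this commit:

# Hypothetical subclass, for illustration only.
from optimatic.optimisers.optimiser import Optimiser

class FixedStepGD(Optimiser):
    """Gradient descent with a constant learning rate."""
    def __init__(self, y, dy, x0, lr=0.01, **kwargs):
        super(FixedStepGD, self).__init__(y, x0, **kwargs)
        self.dy = dy
        self.lr = lr

    def step(self):
        """Runs one fixed-step gradient descent update"""
        self.xn_1 = self.xn
        self.xn = self.xn_1 - self.lr * self.dy(self.xn_1)

opt = FixedStepGD(lambda x: x ** 2, lambda x: 2 * x, 5.0)
print(opt.optimise())  # inherited loop runs until precision is met or steps is exceeded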
