Adding gradient descent optimisation algorithm for single-variable functions
nbro committed Oct 25, 2017
1 parent 8c480da commit 4ea6112
Showing 7 changed files with 137 additions and 33 deletions.
17 changes: 9 additions & 8 deletions .gitignore
@@ -1,24 +1,25 @@
# Files

ands/algorithms/greedy/huffman.py
ands/ds/Graph.py
ands/ds/DirectedGraph.py
ands/ds/UndirectedGraph.py

ands/algorithms/dp/previous_larger_element.py

ands/algorithms/greedy/huffman.py
/ands/ds/MinPriorityQueue.py

ands/algorithms/graphs/
ands/algorithms/unclassified/
/ands/algorithms/math/combinatorics/
/ands/algorithms/math/
ands/algorithms/dp/previous_larger_element.py

*.eggx
*.py[cod]
*$py.class
.DS_Store

notes/
.idea/
_ignore/
venv/
*.egg-info/
__pycache__/

notes/
.idea/
_ignore/
8 changes: 8 additions & 0 deletions .travis.yml
@@ -10,9 +10,17 @@ python:

# command to install dependencies
install:
#- pip install matplotlib
- pip install coveralls
- pip install -e .

# For plots when testing.
# See: https://stackoverflow.com/a/35403128/3924118
#before_script: # configure a headless display to test plot generation
#- "export DISPLAY=:99.0"
#- "sh -e /etc/init.d/xvfb start"
#- sleep 3 # give xvfb some time to start

# command to run tests
script: scripts/travis.sh

2 changes: 1 addition & 1 deletion ands/algorithms/numerical/README.md
@@ -27,8 +27,8 @@ cause a considerably larger error in the final output.

## TODO

- Lagrange interpolation using barycentric form
- Newton's form (using divided differences)
- De Casteljau's algorithm

## References

50 changes: 50 additions & 0 deletions ands/algorithms/numerical/gradient_descent.py
@@ -0,0 +1,50 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


"""
# Meta-info
Author: Nelson Brochado
Created: 14/10/2017
Updated: 26/10/2017
# Description
An implementation of the gradient descent method for finding local minima of
single-variable functions.
# References
- https://en.wikipedia.org/wiki/Gradient_descent
"""

__all__ = ["gradient_descent"]


def gradient_descent(x0: float,
                     df: callable,
                     step_size: float = 0.01,
                     max_iter: int = 50,
                     tol: float = 1e-6):
    """Finds a local minimum of a function whose derivative is df, starting
    from the initial guess x0 and taking steps of size step_size."""
    if not callable(df):
        raise TypeError("df must be a callable object.")

    x = x0

    for _ in range(max_iter):
        x_next = x - step_size * df(x)  # Gradient descent step.

        # Stop once the relative change between consecutive iterates is below tol.
        if abs(x_next - x) < tol * abs(x_next):
            x = x_next
            break

        x = x_next

    return x
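
A minimal usage sketch (illustrative, not part of the committed files), reusing the example from the test further down: f(x) = x**4 - 3*x**3 + 2, whose derivative is df(x) = 4*x**3 - 9*x**2 and whose local minimum lies at x = 9/4.

from ands.algorithms.numerical.gradient_descent import gradient_descent


def df(x: float) -> float:
    """Derivative of f(x) = x ** 4 - 3 * x ** 3 + 2."""
    return 4 * x ** 3 - 9 * x ** 2


# Starting from x0 = 6, the default step size and iteration budget bring the
# iterate close to the analytic minimum at x = 9/4 = 2.25.
x_min = gradient_descent(x0=6, df=df)
print(x_min)  # Roughly 2.24674, the value asserted in the unit test.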
50 changes: 28 additions & 22 deletions tests/algorithms/numerical/polynomial_interpolation_tests.py
@@ -19,7 +19,6 @@
from math import isclose, sqrt
from random import uniform

# import matplotlib.pyplot as plt
from scipy.interpolate import barycentric_interpolate


@@ -32,27 +31,6 @@ def g(x: float) -> float:
    return 1 / sqrt(x)


# def plot_polynomial(algorithm, func, max_degree=40, start=-2.0, end=2.0,
#                     num=50):
#     """Interpolation of function f with a polynomial p at the equidistant
#     points x[k] = −1 + 2 * (k / n), k = 0, ..., n"""
#
#     # n points, so polynomial would be of degree n - 1.
#     for n in range(max_degree):
#         xs = [-1 + 2 * (k / n) for k in range(n)]
#         ys = [func(x) for x in xs]  # Evaluate the function at all xs points.
#         # print("ys =", ys)
#
#         px = np.linspace(start, end, num)
#         py = np.array([algorithm(xs, ys, x0) for x0 in px])
#         # print("py =", py)
#
#         plt.scatter(xs, ys, color='r')
#         plt.plot(px, py, color='b')
#
#         plt.show()


class PolynomialInterpolationTests:
    def __init__(self, polynomial_interpolation_algorithm):
        self.algorithm = polynomial_interpolation_algorithm
@@ -92,3 +70,31 @@ def test_g(self):

        self.assertTrue(isclose(y0, 0.106, rel_tol=1e-02))
        self.assertTrue(isclose(bi0, y0, rel_tol=1e-02))


'''
# An example of a plot of the polynomial that interpolates the points of a
# function.
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    import numpy as np
    """Interpolation of function f with a polynomial p at the equidistant points
    x[k] = −1 + 2 * (k / n), k = 0, ..., n"""
    # n points, so polynomial would be of degree n - 1.
    for n in [10, 20, 40]:
        xs = [-1 + 2 * (k / n) for k in range(n + 1)]
        ys = [f(x) for x in xs]  # Evaluate the function at all xs points.
        px = np.linspace(-1.0, 1.0, 1001)
        py = np.array([barycentric(xs, ys, x0) for x0 in px])
        plt.scatter(xs, ys, s=2)
        plt.semilogy(xs, ys, linewidth=0.5)
        plt.semilogy(px, py, linewidth=0.5)
        plt.margins(x=0.004)
        plt.show()
'''
39 changes: 39 additions & 0 deletions tests/algorithms/numerical/test_gradient_descent.py
@@ -0,0 +1,39 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


"""
# Meta-info
Author: Nelson Brochado
Created: 26/10/2017
Updated: 26/10/2017
# Description
Unittests for the functions inside ands.algorithms.numerical.gradient_descent.py.
"""

import unittest

from ands.algorithms.numerical.gradient_descent import *

'''
def f(x: float) -> float:
    return x ** 4 - 3 * x ** 3 + 2
'''


def df(x: float) -> float:
"""Derivative of f."""
return 4 * x ** 3 - 9 * x ** 2


class TestGradientDescent(unittest.TestCase):
    def test_type_error_when_df_not_callable(self):
        self.assertRaises(TypeError, gradient_descent, 0.3, 5)

    def test_find_local_min_of_f(self):
        self.assertAlmostEqual(gradient_descent(6, df), 2.24674, 5)
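
As a side note on where the expected value comes from (a hand check, not part of the commit): setting df(x) = 4x**3 - 9x**2 = x**2 * (4x - 9) to zero gives the critical points x = 0 and x = 9/4 = 2.25, and the second derivative 12x**2 - 18x is positive at 9/4, so that is the local minimum the descent from x0 = 6 approaches (2.24674 to five decimal places with the default step size and iteration count).

# Quick numeric confirmation of the analytic minimum at x = 9/4 (illustrative only):
x = 9 / 4
assert abs(4 * x ** 3 - 9 * x ** 2) < 1e-12  # df(9/4) = 0, so 9/4 is a critical point.
assert 12 * x ** 2 - 18 * x > 0              # d2f(9/4) > 0, so it is a local minimum.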
4 changes: 2 additions & 2 deletions tests/algorithms/numerical/test_horner.py
@@ -33,5 +33,5 @@ def setUp(self):
        self.coefficients = [uniform(-10, 10) for _ in range(self.degree + 1)]

    def test_one(self):
        self.assertEqual(horner(self.x0, self.coefficients),
                         polyval(self.x0, self.coefficients))
        self.assertAlmostEqual(horner(self.x0, self.coefficients),
                               polyval(self.x0, self.coefficients))
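
The switch from assertEqual to assertAlmostEqual reflects that two floating-point evaluations of the same polynomial need not be bit-identical, so exact equality is too strict a check for randomly generated coefficients. A small illustration of the general point (arbitrary values, not taken from the test):

# Two mathematically equivalent evaluations of p(x) = a0 + a1*x + a2*x^2 + a3*x^3
# can round differently, which is why a tolerance-based comparison is used.
x = 0.123456789
coefficients = [2.0, 3.0, 5.0, 7.0]  # a0, a1, a2, a3

naive = sum(c * x ** i for i, c in enumerate(coefficients))

horner_value = 0.0
for c in reversed(coefficients):  # Horner's rule: (((a3) * x + a2) * x + a1) * x + a0
    horner_value = horner_value * x + c

print(abs(naive - horner_value))  # Tiny, but not necessarily exactly zero.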
