From 4ea611294aaa6f20b07ab53db88faf88aa6ff18d Mon Sep 17 00:00:00 2001
From: Nelson Brochado
Date: Thu, 26 Oct 2017 01:18:15 +0200
Subject: [PATCH] Adding gradient descent optimisation algorithm for
 single-variable functions

---
 .gitignore                                    | 17 ++++---
 .travis.yml                                   |  8 +++
 ands/algorithms/numerical/README.md           |  2 +-
 ands/algorithms/numerical/gradient_descent.py | 62 ++++++++++++++++++++
 .../polynomial_interpolation_tests.py         | 53 +++++++++--------
 .../numerical/test_gradient_descent.py        | 47 +++++++++++++++
 tests/algorithms/numerical/test_horner.py     |  6 ++-
 7 files changed, 162 insertions(+), 33 deletions(-)
 create mode 100644 ands/algorithms/numerical/gradient_descent.py
 create mode 100644 tests/algorithms/numerical/test_gradient_descent.py

diff --git a/.gitignore b/.gitignore
index 96e5e2e2..24635aac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,24 +1,25 @@
-# Files
-
-ands/algorithms/greedy/huffman.py
 ands/ds/Graph.py
 ands/ds/DirectedGraph.py
 ands/ds/UndirectedGraph.py
+
+ands/algorithms/dp/previous_larger_element.py
+
+ands/algorithms/greedy/huffman.py
 /ands/ds/MinPriorityQueue.py
+
 ands/algorithms/graphs/
-ands/algorithms/unclassified/
 /ands/algorithms/math/combinatorics/
 /ands/algorithms/math/
-ands/algorithms/dp/previous_larger_element.py
 
 *.eggx
 *.py[cod]
 *$py.class
 
 .DS_Store
-notes/
-.idea/
-_ignore/
 
 venv/
 *.egg-info/
 __pycache__/
+
+notes/
+.idea/
+_ignore/
diff --git a/.travis.yml b/.travis.yml
index 9516ee79..f4555e65 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,9 +10,17 @@ python:
 
 # command to install dependencies
 install:
+  #- pip install matplotlib
   - pip install coveralls
   - pip install -e .
 
+# For plots when testing.
+# See: https://stackoverflow.com/a/35403128/3924118
+#before_script: # configure a headless display to test plot generation
+#- "export DISPLAY=:99.0"
+#- "sh -e /etc/init.d/xvfb start"
+#- sleep 3 # give xvfb some time to start
+
 # command to run tests
 script: scripts/travis.sh
diff --git a/ands/algorithms/numerical/README.md b/ands/algorithms/numerical/README.md
index baec85a2..2536e84b 100644
--- a/ands/algorithms/numerical/README.md
+++ b/ands/algorithms/numerical/README.md
@@ -27,8 +27,8 @@ cause a considerably larger error in the final output.
 
 ## TODO
 
-- Lagrange interpolation using barycentric form
 - Newton's form (using divided differences)
+- De Casteljau's algorithm
 
 ## References
diff --git a/ands/algorithms/numerical/gradient_descent.py b/ands/algorithms/numerical/gradient_descent.py
new file mode 100644
index 00000000..64db085d
--- /dev/null
+++ b/ands/algorithms/numerical/gradient_descent.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+
+"""
+# Meta-info
+
+Author: Nelson Brochado
+
+Created: 14/10/2017
+
+Updated: 26/10/2017
+
+# Description
+
+An implementation of the gradient descent method for finding local minima of
+differentiable single-variable functions.
+
+# References
+
+- https://en.wikipedia.org/wiki/Gradient_descent
+"""
+
+__all__ = ["gradient_descent"]
+
+
+def gradient_descent(x0: float,
+                     df: callable,
+                     step_size: float = 0.01,
+                     max_iter: int = 50,
+                     tol: float = 1e-6) -> float:
+    """Finds a local minimum of a function whose derivative is df, starting
+    from the initial guess x0 and repeatedly stepping in the direction
+    opposite to the derivative, with step length step_size * |df(x)|."""
+    if not callable(df):
+        raise TypeError("df must be a callable object.")
+
+    x = x0
+
+    for _ in range(max_iter):
+        x_next = x - step_size * df(x)  # Gradient descent step.
+
+        # Relative convergence criterion: stop as soon as the change in x is
+        # small compared to the magnitude of x_next.
+        if abs(x_next - x) < tol * abs(x_next):
+            x = x_next
+            break
+
+        x = x_next
+
+    return x
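+
+
+# A minimal usage sketch (illustrative only; example_df is not part of the
+# module's API): minimise f(x) = x ** 4 - 3 * x ** 3 + 2, whose derivative
+# is df(x) = 4 * x ** 3 - 9 * x ** 2. From calculus, the local minimum is
+# expected at x = 9 / 4 = 2.25.
+if __name__ == "__main__":
+    def example_df(x: float) -> float:
+        return 4 * x ** 3 - 9 * x ** 2
+
+    print(gradient_descent(6.0, example_df))  # About 2.2467, approaching 9 / 4.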
diff --git a/tests/algorithms/numerical/polynomial_interpolation_tests.py b/tests/algorithms/numerical/polynomial_interpolation_tests.py
index 6471afc6..d09d7ed3 100644
--- a/tests/algorithms/numerical/polynomial_interpolation_tests.py
+++ b/tests/algorithms/numerical/polynomial_interpolation_tests.py
@@ -19,7 +19,6 @@
 from math import isclose, sqrt
 from random import uniform
 
-# import matplotlib.pyplot as plt
 from scipy.interpolate import barycentric_interpolate
 
 
@@ -32,27 +31,6 @@ def g(x: float) -> float:
     return 1 / sqrt(x)
 
 
-# def plot_polynomial(algorithm, func, max_degree=40, start=-2.0, end=2.0,
-#                     num=50):
-#     """Interpolation of function f with a polynomial p at the equidistant
-#     points x[k] = −1 + 2 * (k / n), k = 0, ..., n"""
-#
-#     # n points, so polynomial would be of degree n - 1.
-#     for n in range(max_degree):
-#         xs = [-1 + 2 * (k / n) for k in range(n)]
-#         ys = [func(x) for x in xs]  # Evaluate the function at all xs points.
-#         # print("ys =", ys)
-#
-#         px = np.linspace(start, end, num)
-#         py = np.array([algorithm(xs, ys, x0) for x0 in px])
-#         # print("py =", py)
-#
-#         plt.scatter(xs, ys, color='r')
-#         plt.plot(px, py, color='b')
-#
-#         plt.show()
-
-
 class PolynomialInterpolationTests:
     def __init__(self, polynomial_interpolation_algorithm):
         self.algorithm = polynomial_interpolation_algorithm
@@ -92,3 +70,34 @@ def test_g(self):
         self.assertTrue(isclose(y0, 0.106, rel_tol=1e-02))
 
         self.assertTrue(isclose(bi0, y0, rel_tol=1e-02))
+
+
+'''
+# An example of a plot of the polynomial that interpolates the points of a
+# function.
+
+if __name__ == "__main__":
+    import matplotlib.pyplot as plt
+    import numpy as np
+
+    # Assumes this package's barycentric module provides the interpolation
+    # function used below.
+    from ands.algorithms.numerical.barycentric import barycentric
+
+    # Interpolate the function f with a polynomial p at the equidistant
+    # points x[k] = -1 + 2 * (k / n), k = 0, ..., n.
+    # n + 1 points, so the interpolating polynomial has degree n.
+    for n in [10, 20, 40]:
+        xs = [-1 + 2 * (k / n) for k in range(n + 1)]
+        ys = [f(x) for x in xs]  # Evaluate f at all points in xs.
+
+        px = np.linspace(-1.0, 1.0, 1001)
+        py = np.array([barycentric(xs, ys, x0) for x0 in px])
+
+        plt.scatter(xs, ys, s=2)
+        plt.semilogy(xs, ys, linewidth=0.5)
+        plt.semilogy(px, py, linewidth=0.5)
+        plt.margins(x=0.004)
+
+        plt.show()
+'''
diff --git a/tests/algorithms/numerical/test_gradient_descent.py b/tests/algorithms/numerical/test_gradient_descent.py
new file mode 100644
index 00000000..68a4b0df
--- /dev/null
+++ b/tests/algorithms/numerical/test_gradient_descent.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+
+"""
+# Meta-info
+
+Author: Nelson Brochado
+
+Created: 26/10/2017
+
+Updated: 26/10/2017
+
+# Description
+
+Unittests for the functions inside ands.algorithms.numerical.gradient_descent.py.
+"""
+
+import unittest
+
+from ands.algorithms.numerical.gradient_descent import *
+
+# f itself is not needed by gradient_descent, only its derivative df; it is
+# kept here for reference.
+'''
+def f(x: float) -> float:
+    return x ** 4 - 3 * x ** 3 + 2
+'''
+
+
+def df(x: float) -> float:
+    """Derivative of f."""
+    return 4 * x ** 3 - 9 * x ** 2
+
+
+class TestGradientDescent(unittest.TestCase):
+    def test_type_error_when_df_not_callable(self):
+        self.assertRaises(TypeError, gradient_descent, 0.3, 5)
+
+    def test_find_local_min_of_f(self):
+        self.assertAlmostEqual(gradient_descent(6, df), 2.24674, 5)
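+
+    def test_stationary_point_is_fixed(self):
+        # A sketch of an extra check (not in the original suite): x = 0 is a
+        # stationary point of f, since df(0) = 0, so gradient descent should
+        # not move away from it.
+        self.assertAlmostEqual(gradient_descent(0.0, df), 0.0)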
+""" + +import unittest + +from ands.algorithms.numerical.gradient_descent import * + +''' +def f(x: float) -> float: + return x ** 4 - 3 * x ** 3 + 2 +''' + + +def df(x: float) -> float: + """Derivative of f.""" + return 4 * x ** 3 - 9 * x ** 2 + + +class TestGradientDescent(unittest.TestCase): + def test_type_error_when_df_not_callable(self): + self.assertRaises(TypeError, gradient_descent, 0.3, 5) + + def test_find_local_min_of_f(self): + self.assertAlmostEqual(gradient_descent(6, df), 2.24674, 5) diff --git a/tests/algorithms/numerical/test_horner.py b/tests/algorithms/numerical/test_horner.py index b5eecfa9..d7c91d52 100644 --- a/tests/algorithms/numerical/test_horner.py +++ b/tests/algorithms/numerical/test_horner.py @@ -33,5 +33,5 @@ def setUp(self): self.coefficients = [uniform(-10, 10) for _ in range(self.degree + 1)] def test_one(self): - self.assertEqual(horner(self.x0, self.coefficients), - polyval(self.x0, self.coefficients)) + self.assertAlmostEqual(horner(self.x0, self.coefficients), + polyval(self.x0, self.coefficients))