From 1aaacf31f255a03455069e401226f1e851056ab9 Mon Sep 17 00:00:00 2001
From: Miles Lubin
Date: Tue, 27 Oct 2015 13:24:51 -0400
Subject: [PATCH] deprecate numerical integration

---
 README.md         | 24 ++++--------------------
 src/Calculus.jl   |  4 ++--
 src/integrate.jl  | 49 -------------------------------------------------
 test/integrate.jl | 39 ---------------------------------------
 test/runtests.jl  |  1 -
 5 files changed, 6 insertions(+), 111 deletions(-)
 delete mode 100644 src/integrate.jl
 delete mode 100644 test/integrate.jl

diff --git a/README.md b/README.md
index bd1c07f..0eb0b14 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,6 @@ Most users will want to work with a limited set of basic functions:
 * `second_derivative()`: Use this for functions from R to R
 * `gradient()`: Use this for functions from R^n to R
 * `hessian()`: Use this for functions from R^n to R
-* `integrate()`: Use this to integrate functions from R to R
 * `differentiate()`: Use this to perform symbolic differentiation
 * `simplify()`: Use this to perform symbolic simplification
 * `deparse()`: Use this to get usual infix representation of expressions
@@ -104,23 +103,6 @@ derivative you calculate:
     f''(1.0) - (-sin(1.0))
     f'''(1.0) - (-cos(1.0))
 
-## Integration using Simpson's Rule
-
-    using Calculus
-
-    # Compare with log(2)
-    integrate(x -> 1 / x, 1.0, 2.0)
-
-    # Compare with cos(pi) - cos(0)
-    integrate(x -> -sin(x), 0.0, float64(pi))
-
-## Integration using Monte Carlo method
-
-    using Calculus
-
-    # Compare with cos(pi) - cos(0)
-    integrate(x -> -sin(x), 0.0, float64(pi), :monte_carlo)
-
 ## Symbolic Differentiation
 
     using Calculus
@@ -128,9 +110,10 @@ derivative you calculate:
     differentiate("cos(x) + sin(x) + exp(-x) * cos(x)", :x)
     differentiate("cos(x) + sin(y) + exp(-x) * cos(y)", [:x, :y])
 
-# Coming Soon
+## Numerical Integration
 
-* Finite differencing based on complex numbers
+The Calculus package no longer provides routines for univariate numerical integration.
+Use the ``quadgk`` method from base Julia instead.
 
 # Credits
 
@@ -142,6 +125,7 @@ Calculus.jl is built on contributions from:
 * Nathaniel Daw
 * Blake Johnson
 * Avik Sengupta
+* Miles Lubin
 
 And draws inspiration and ideas from:
 
diff --git a/src/Calculus.jl b/src/Calculus.jl
index f38ea28..93a7ddf 100644
--- a/src/Calculus.jl
+++ b/src/Calculus.jl
@@ -12,7 +12,6 @@ module Calculus
         differentiate,
         gradient,
         hessian,
-        integrate,
         jacobian,
         second_derivative
 
@@ -60,7 +59,8 @@ module Calculus
     include("finite_difference.jl")
     include("derivative.jl")
     include("check_derivative.jl")
-    include("integrate.jl")
+    @Base.deprecate integrate(f,a,b) quadgk(f,a,b)[1]
+    @Base.deprecate integrate(f,a,b,method) quadgk(f,a,b)[1]
     include("symbolic.jl")
     include("differentiate.jl")
     include("deparse.jl")
diff --git a/src/integrate.jl b/src/integrate.jl
deleted file mode 100644
index 28eff61..0000000
--- a/src/integrate.jl
+++ /dev/null
@@ -1,49 +0,0 @@
-function adaptive_simpsons_inner(f::Function, a::Real, b::Real,
-                                 epsilon::Real, S::Real,
-                                 fa::Real, fb::Real, fc::Real, bottom::Int)
-    c = (a + b) / 2
-    h = b - a
-    d = (a + c) / 2
-    g = (c + b) / 2
-    fd = f(d)
-    fe = f(g)
-    Sleft = (h / 12) * (fa + 4 * fd + fc)
-    Sright = (h / 12) * (fc + 4 * fe + fb)
-    S2 = Sleft + Sright
-    if bottom <= 0 || abs(S2 - S) <= 15 * epsilon
-        return S2 + (S2 - S) / 15
-    end
-    return adaptive_simpsons_inner(f, a, c, epsilon / 2, Sleft, fa, fc, fd, bottom - 1) +
-           adaptive_simpsons_inner(f, c, b, epsilon / 2, Sright, fc, fb, fe, bottom - 1)
-end
-
-function adaptive_simpsons_outer(f::Function, a::Real, b::Real,
-                                 accuracy::Real=10e-10, max_iterations::Int=50)
-    c = (a + b) / 2
-    h = b - a
-    fa = f(a)
-    fb = f(b)
-    fc = f(c)
-    S = (h / 6) * (fa + 4 * fc + fb)
-    return adaptive_simpsons_inner(f, a, b, accuracy, S, fa, fb, fc, max_iterations)
-end
-
-function monte_carlo(f::Function, a::Real, b::Real, iterations::Int=10_000)
-    estimate = 0.0
-    width = (b - a)
-    for i in 1:iterations
-        x = width * rand() + a
-        estimate += f(x) * width
-    end
-    return estimate / iterations
-end
-
-function integrate(f::Function, a::Real, b::Real, method::Symbol=:simpsons)
-    if method == :simpsons
-        adaptive_simpsons_outer(f, a, b)
-    elseif method == :monte_carlo
-        monte_carlo(f, a, b)
-    else
-        error("Unknown method of integration: $(method)")
-    end
-end
diff --git a/test/integrate.jl b/test/integrate.jl
deleted file mode 100644
index d95628f..0000000
--- a/test/integrate.jl
+++ /dev/null
@@ -1,39 +0,0 @@
-@test norm(integrate(x -> 1 / x, 1.0, 2.0) - log(2)) < 10e-8
-@test norm(integrate(x -> -sin(x), 0.0, pi) - (cos(pi) - cos(0.0))) < 10e-8
-
-r = integrate(x -> 1, 0, 1)
-@test norm(r - 1) < 10e-8
-
-r = integrate(x -> x, 0, 1)
-@test norm(r - 0.5) < 10e-8
-
-r = integrate(x -> x * x, 0, 1)
-@test norm(r - 1 / 3) < 10e-8
-
-r = integrate(sin, 0, pi)
-@test norm(r - 2) < 10e-8
-
-r = integrate(cos, 0, pi)
-@test norm(r - 0) < 10e-8
-
-r = integrate(x -> sin(x)^2 + sin(x)^2, 0, pi)
-@test norm(r - pi) < 10e-8
-
-# Nice example, but requires Distributions
-# require("Distributions")
-# using Distributions
-# r = integrate(x -> pdf(Normal(0.0, 1.0), x), -10, 10)
-# @test norm(1 - r) < 10e-8
-
-r = integrate(x -> 1 / x, 0.01, 1)
-@test norm(r - 4.60517) < 10e-7
-
-r = integrate(x -> cos(x)^8, 0, 2 * pi)
-@test norm(r - 35 * pi / 64) < 10e-7
-
-r = integrate(x -> sin(x) - sin(x^2) + sin(x^3), pi, 2 * pi)
-@test norm(r - (-1.830467)) < 10e-7
-
-# Monte Carlo integration tests
-r = integrate(x -> sin(x), 0, pi, :monte_carlo)
-@test norm(r - 2) < 10e-1
diff --git a/test/runtests.jl b/test/runtests.jl
index 5ffc9d0..cfbdb52 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -8,7 +8,6 @@ using Base.Test
 tests = ["finite_difference",
          "derivative",
          "check_derivative",
-         "integrate",
          "symbolic",
          "deparse"]
 
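Migration note (not part of the patch): the deprecation above forwards `integrate(f, a, b)` to `quadgk(f, a, b)[1]`, which is what callers should now write directly. In Julia of this era (0.3/0.4), `quadgk` lives in Base and returns a tuple `(estimate, error_bound)`. A minimal sketch of the replacement, with an illustrative tolerance:

    using Calculus

    # Deprecated form: still works, but warns and forwards to quadgk.
    old = integrate(x -> 1 / x, 1.0, 2.0)

    # Preferred form: quadgk returns (estimate, error_bound).
    val, err = quadgk(x -> 1 / x, 1.0, 2.0)

    # Compare with log(2); 1e-8 is an illustrative tolerance, not a guarantee.
    abs(val - log(2)) < 1e-8

Note that the four-argument form `integrate(f, a, b, method)` is also forwarded to `quadgk(f, a, b)[1]`, so the `method` argument (e.g. `:monte_carlo`) is ignored after this change.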