From 8aef56d659fb5f973adcb0f8ebf4815bb780a086 Mon Sep 17 00:00:00 2001
From: Kristoffer Carlsson
Date: Mon, 23 Jul 2018 10:38:52 +0200
Subject: [PATCH] work around perf regression on 0.7

---
 Project.toml       |  1 +
 src/Distances.jl   |  1 +
 src/metrics.jl     |  2 +-
 test/runtests.jl   |  1 +
 test/test_dists.jl | 27 +++++++++++++++++++++------
 5 files changed, 25 insertions(+), 7 deletions(-)

diff --git a/Project.toml b/Project.toml
index a43f6a3..e7781c4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -4,6 +4,7 @@ version = "0.7.0"
 
 [deps]
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 
 [targets.test.deps]
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
diff --git a/src/Distances.jl b/src/Distances.jl
index c7d4044..cfcf1e2 100644
--- a/src/Distances.jl
+++ b/src/Distances.jl
@@ -3,6 +3,7 @@ __precompile__()
 module Distances
 
 using LinearAlgebra
+using Statistics
 
 export
     # generic types/functions
diff --git a/src/metrics.jl b/src/metrics.jl
index abc29e9..1ae54d2 100644
--- a/src/metrics.jl
+++ b/src/metrics.jl
@@ -157,7 +157,7 @@ const ArraySlice{T} = SubArray{T,1,Array{T,2},Tuple{Base.Slice{Base.OneTo{Int}},
     end
     @inbounds begin
         s = eval_start(d, a, b)
-        @simd for I in eachindex(a, b)
+        @simd for I in 1:length(a)
             ai = a[I]
             bi = b[I]
             s = eval_reduce(d, s, eval_op(d, ai, bi))
diff --git a/test/runtests.jl b/test/runtests.jl
index dbd5255..eb9b147 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -3,6 +3,7 @@ using Distances
 using Test
 using LinearAlgebra
 using Random
+using Statistics
 
 include("F64.jl")
 include("test_dists.jl")
diff --git a/test/test_dists.jl b/test/test_dists.jl
index edfa555..0eadecf 100644
--- a/test/test_dists.jl
+++ b/test/test_dists.jl
@@ -511,9 +511,9 @@ end
 end
 
 @testset "Bregman Divergence" begin
-    # Some basic tests. 
+    # Some basic tests.
     @test_throws ArgumentError bregman(x -> x, x -> 2*x, [1, 2, 3], [1, 2, 3])
-    # Test if Bregman() correctly implements the gkl divergence between two random vectors. 
+    # Test if Bregman() correctly implements the gkl divergence between two random vectors.
     F(p) = LinearAlgebra.dot(p, log.(p));
     ∇(p) = map(x -> log(x) + 1, p)
     testDist = Bregman(F, ∇)
@@ -522,13 +522,28 @@ end
     p = p/sum(p);
     q = q/sum(q);
     @test evaluate(testDist, p, q) ≈ gkl_divergence(p, q)
-    # Test if Bregman() correctly implements the squared euclidean dist. between them. 
+    # Test if Bregman() correctly implements the squared euclidean dist. between them.
     @test bregman(x -> norm(x)^2, x -> 2*x, p, q) ≈ sqeuclidean(p, q)
-    # Test if Bregman() correctly implements the IS distance. 
+    # Test if Bregman() correctly implements the IS distance.
     F(p) = -1 * sum(log.(p))
     ∇(p) = map(x -> -1 * x^(-1), p)
     function ISdist(p::AbstractVector, q::AbstractVector)
         return sum([p[i]/q[i] - log(p[i]/q[i]) - 1 for i in 1:length(p)])
     end
-    @test bregman(F, ∇, p, q) ≈ ISdist(p, q)
-end
\ No newline at end of file
+    @test bregman(F, ∇, p, q) ≈ ISdist(p, q)
+end
+
+@testset "zero allocation colwise!" begin
+    d = Euclidean()
+    a = rand(2, 41)
+    b = rand(2, 41)
+    z = zeros(41)
+    colwise!(z, d, a, b)
+    # This fails when bounds checking is enforced
+    bounds = Base.JLOptions().check_bounds
+    if bounds == 0
+        @test (@allocated colwise!(z, d, a, b)) == 0
+    else
+        @test_broken (@allocated colwise!(z, d, a, b)) == 0
+    end
+end
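
A minimal sketch (not part of the patch itself) of how the zero-allocation behaviour exercised by the new "zero allocation colwise!" testset can be checked interactively after applying the patch. The metric and matrix sizes simply mirror the test and are otherwise arbitrary; the first call only compiles the method so that @allocated measures the call itself, and 0 is only expected when Julia runs without --check-bounds=yes.

    using Distances

    d = Euclidean()
    a = rand(2, 41)
    b = rand(2, 41)
    z = zeros(41)

    colwise!(z, d, a, b)             # warm-up call: compile before measuring
    @allocated colwise!(z, d, a, b)  # expected to be 0 with the 1:length(a) @simd loop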