Merged
20 changes: 20 additions & 0 deletions Project.toml.template
@@ -0,0 +1,20 @@
name = "LossFunctions"
uuid = "30fc2ffe-d236-52d8-8643-a9d8f7c094a7"
version = "0.2.1"

[deps]
InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
LearnBase = "7f8f8fb0-2700-5f03-b4bd-41f8cfc144b6"
Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

[extras]
DualNumbers = "fa6b7ba4-c1ee-5f82-b5fc-ecf0adba8f74"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["DualNumbers", "Test", "Statistics", "Random"]
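For reference, the [extras]/[targets] pairing above declares test-only dependencies: DualNumbers, Test, Statistics and Random are resolved only when the test suite runs, not for ordinary users of the package. A minimal sketch of how that target is exercised:

```julia
using Pkg
Pkg.test("LossFunctions")   # installs the [targets] test deps, then runs test/runtests.jl
```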
5 changes: 2 additions & 3 deletions src/LossFunctions.jl
@@ -1,14 +1,13 @@
__precompile__(true)
module LossFunctions

using RecipesBase

import Base.*
using Base.Cartesian
-using SparseArrays, InteractiveUtils
+using Markdown, SparseArrays, InteractiveUtils

-
-using LearnBase
+using LearnBase
import LearnBase: value, value!, deriv, deriv2, scaled, value_deriv,
isminimizable,
isdifferentiable,
42 changes: 21 additions & 21 deletions src/supervised/distance.jl
@@ -112,8 +112,8 @@ It is strictly convex.
const L2DistLoss = LPDistLoss{2}

value(loss::L2DistLoss, difference::Number) = abs2(difference)
-deriv(loss::L2DistLoss, difference::T) where {T<:Number} = T(2) * difference
-deriv2(loss::L2DistLoss, difference::T) where {T<:Number} = T(2)
+deriv(loss::L2DistLoss, difference::T) where {T<:Number} = convert(T,2) * difference
+deriv2(loss::L2DistLoss, difference::T) where {T<:Number} = convert(T,2)
@Evizero (Member, Author) commented on Aug 3, 2018:

    the changes from T(x) to convert(T,x) are unfortunate, but that seems to be the right thing to do here.

A member replied:

    Agreed. Ah, that must be what all the dual number warnings were about...

isdifferentiable(::L2DistLoss) = true
isdifferentiable(::L2DistLoss, at) = true
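For context on the thread above: when a loss is differentiated with dual numbers, the promoted type T is a Dual, and in the Julia 0.7 era a bare T(2)-style constructor call was not reliably defined for duals (hence the deprecation warnings), whereas convert(T, 2) always is. A minimal sketch, assuming DualNumbers.jl and its realpart/dualpart accessors:

```julia
using DualNumbers

d = Dual(3.0, 1.0)                     # 3.0 + 1.0ε seeds dr/dr = 1
T = promote_type(Float64, typeof(d))   # Dual{Float64}, as in the methods above
two = convert(T, 2)                    # 2.0 + 0.0ε; the spelling this PR adopts

# forward-mode check of deriv(::L2DistLoss, r) = convert(T,2) * r against
# the ε-part of value(::L2DistLoss, r) = abs2(r):
@assert dualpart(abs2(d)) == realpart(two * d)   # d/dr r^2 = 2r = 6.0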
@@ -204,17 +204,17 @@ function value(loss::HuberLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
abs_diff = abs(difference)
if abs_diff <= loss.d
-return T(0.5)*abs2(difference) # quadratic
+return convert(T,0.5)*abs2(difference) # quadratic
else
-return (loss.d*abs_diff) - T(0.5)*abs2(loss.d) # linear
+return (loss.d*abs_diff) - convert(T,0.5)*abs2(loss.d) # linear
end
end
function deriv(loss::HuberLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
if abs(difference) <= loss.d
-return T(difference) # quadratic
+return convert(T,difference) # quadratic
else
-return loss.d*T(sign(difference)) # linear
+return loss.d*convert(T,sign(difference)) # linear
end
end
function deriv2(loss::HuberLoss{T1}, difference::T2) where {T1,T2<:Number}
@@ -225,11 +225,11 @@ function value_deriv(loss::HuberLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
abs_diff = abs(difference)
if abs_diff <= loss.d
-val = T(0.5)*abs2(difference)
-der = T(difference)
+val = convert(T,0.5)*abs2(difference)
+der = convert(T,difference)
else
-val = (loss.d*abs_diff) - T(0.5)*abs2(loss.d)
-der = loss.d*T(sign(difference))
+val = (loss.d*abs_diff) - convert(T,0.5)*abs2(loss.d)
+der = loss.d*convert(T,sign(difference))
end
return val,der
end
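The Huber methods above promote the loss parameter type T1 and the residual type T2 before converting constants, so mixed-precision calls stay type-stable. A quick sanity check of the two regimes, a sketch assuming the package is loaded and using the scalar value/deriv methods defined in this file:

```julia
using LossFunctions

huber = HuberLoss(1.0)                           # d = 1.0
@assert value(huber, 0.5) == 0.5 * 0.5^2         # |r| <= d: quadratic, 0.125
@assert value(huber, 3.0) == 1.0 * 3.0 - 0.5     # |r| > d: linear, d*|r| - d^2/2
@assert deriv(huber, 3.0) == 1.0                 # clipped slope d*sign(r)
@assert value_deriv(huber, 0.5) == (0.125, 0.5)  # both at once
```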
@@ -291,13 +291,13 @@ function value(loss::L1EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number}
end
function deriv(loss::L1EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
-abs(difference) <= loss.ε ? zero(T) : T(sign(difference))
+abs(difference) <= loss.ε ? zero(T) : convert(T,sign(difference))
end
deriv2(loss::L1EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number} = zero(promote_type(T1,T2))
function value_deriv(loss::L1EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
absr = abs(difference)
-absr <= loss.ε ? (zero(T), zero(T)) : (absr - loss.ε, T(sign(difference)))
+absr <= loss.ε ? (zero(T), zero(T)) : (absr - loss.ε, convert(T,sign(difference)))
end

issymmetric(::L1EpsilonInsLoss) = true
@@ -356,16 +356,16 @@ end
function deriv(loss::L2EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
absr = abs(difference)
-absr <= loss.ε ? zero(T) : T(2)*sign(difference)*(absr - loss.ε)
+absr <= loss.ε ? zero(T) : convert(T,2)*sign(difference)*(absr - loss.ε)
end
function deriv2(loss::L2EpsilonInsLoss{T1}, difference::T2) where {T1,T2<:Number}
T = promote_type(T1,T2)
-abs(difference) <= loss.ε ? zero(T) : T(2)
+abs(difference) <= loss.ε ? zero(T) : convert(T,2)
end
function value_deriv(loss::L2EpsilonInsLoss{T}, difference::Number) where T
absr = abs(difference)
diff = absr - loss.ε
-absr <= loss.ε ? (zero(T), zero(T)) : (abs2(diff), T(2)*sign(difference)*diff)
+absr <= loss.ε ? (zero(T), zero(T)) : (abs2(diff), convert(T,2)*sign(difference)*diff)
end

issymmetric(::L2EpsilonInsLoss) = true
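Both ε-insensitive losses above are flat inside the tube |r| <= ε, which is why value, deriv and deriv2 all short-circuit to zero(T) there. A short check of both variants, a sketch assuming the package is loaded:

```julia
using LossFunctions

l1eps = L1EpsilonInsLoss(0.5)
@assert value(l1eps, 0.3) == 0.0    # inside the tube
@assert value(l1eps, 1.0) == 0.5    # |r| - ε outside
@assert deriv(l1eps, 1.0) == 1.0    # sign(r) outside

l2eps = L2EpsilonInsLoss(0.5)
@assert value(l2eps, 1.0) == 0.25   # (|r| - ε)^2 outside
@assert deriv(l2eps, 1.0) == 1.0    # 2*sign(r)*(|r| - ε)
```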
@@ -410,21 +410,21 @@ struct LogitDistLoss <: DistanceLoss end
function value(loss::LogitDistLoss, difference::Number)
er = exp(difference)
T = typeof(er)
--log(T(4)) - difference + 2log(one(T) + er)
+-log(convert(T,4)) - difference + 2log(one(T) + er)
end
function deriv(loss::LogitDistLoss, difference::T) where T<:Number
-tanh(difference / T(2))
+tanh(difference / convert(T,2))
end
function deriv2(loss::LogitDistLoss, difference::Number)
er = exp(difference)
T = typeof(er)
-T(2)*er / abs2(one(T) + er)
+convert(T,2)*er / abs2(one(T) + er)
end
function value_deriv(loss::LogitDistLoss, difference::Number)
er = exp(difference)
T = typeof(er)
er1 = one(T) + er
--log(T(4)) - difference + 2log(er1), (er - one(T)) / (er1)
+-log(convert(T,4)) - difference + 2log(er1), (er - one(T)) / (er1)
end

issymmetric(::LogitDistLoss) = true
@@ -474,11 +474,11 @@ const PinballLoss = QuantileLoss

function value(loss::QuantileLoss{T1}, diff::T2) where {T1, T2 <: Number}
T = promote_type(T1, T2)
-diff * (T(diff > 0) - loss.τ)
+diff * (convert(T,diff > 0) - loss.τ)
end
function deriv(loss::QuantileLoss{T1}, diff::T2) where {T1, T2 <: Number}
T = promote_type(T1, T2)
-T(diff > 0) - loss.τ
+convert(T,diff > 0) - loss.τ
end
deriv2(::QuantileLoss{T1}, diff::T2) where {T1, T2 <: Number} = zero(promote_type(T1, T2))

20 changes: 10 additions & 10 deletions src/supervised/margin.jl
@@ -221,9 +221,9 @@ It is locally Lipschitz continuous and convex, but not strictly convex.
struct L2HingeLoss <: MarginLoss end

value(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? zero(T) : abs2(one(T) - agreement)
-deriv(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? zero(T) : T(2) * (agreement - one(T))
-deriv2(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? zero(T) : T(2)
-value_deriv(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? (zero(T), zero(T)) : (abs2(one(T) - agreement), T(2) * (agreement - one(T)))
+deriv(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? zero(T) : convert(T,2) * (agreement - one(T))
+deriv2(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? zero(T) : convert(T,2)
+value_deriv(loss::L2HingeLoss, agreement::T) where {T<:Number} = agreement >= 1 ? (zero(T), zero(T)) : (abs2(one(T) - agreement), convert(T,2) * (agreement - one(T)))

isunivfishercons(::L2HingeLoss) = true
isdifferentiable(::L2HingeLoss) = true
@@ -334,17 +334,17 @@ It is Lipschitz continuous and convex, but not strictly convex.
struct ModifiedHuberLoss <: MarginLoss end

function value(loss::ModifiedHuberLoss, agreement::T) where T<:Number
-agreement >= -1 ? abs2(max(zero(T), one(agreement) - agreement)) : -T(4) * agreement
+agreement >= -1 ? abs2(max(zero(T), one(agreement) - agreement)) : -convert(T,4) * agreement
end
function deriv(loss::ModifiedHuberLoss, agreement::T) where T<:Number
if agreement >= -1
-agreement > 1 ? zero(T) : T(2)*agreement - T(2)
+agreement > 1 ? zero(T) : convert(T,2)*agreement - convert(T,2)
else
--T(4)
+-convert(T,4)
end
end
function deriv2(loss::ModifiedHuberLoss, agreement::T) where T<:Number
-agreement < -1 || agreement > 1 ? zero(T) : T(2)
+agreement < -1 || agreement > 1 ? zero(T) : convert(T,2)
end

isdifferentiable(::ModifiedHuberLoss) = true
@@ -389,8 +389,8 @@ It is locally Lipschitz continuous and strongly convex.
struct L2MarginLoss <: MarginLoss end

value(loss::L2MarginLoss, agreement::T) where {T<:Number} = abs2(one(T) - agreement)
-deriv(loss::L2MarginLoss, agreement::T) where {T<:Number} = T(2) * (agreement - one(T))
-deriv2(loss::L2MarginLoss, agreement::T) where {T<:Number} = T(2)
+deriv(loss::L2MarginLoss, agreement::T) where {T<:Number} = convert(T,2) * (agreement - one(T))
+deriv2(loss::L2MarginLoss, agreement::T) where {T<:Number} = convert(T,2)

isunivfishercons(::L2MarginLoss) = true
isdifferentiable(::L2MarginLoss) = true
@@ -484,7 +484,7 @@ struct SigmoidLoss <: MarginLoss end

value(loss::SigmoidLoss, agreement::Number) = one(agreement) - tanh(agreement)
deriv(loss::SigmoidLoss, agreement::Number) = -abs2(sech(agreement))
-deriv2(loss::SigmoidLoss, agreement::T) where {T<:Number} = T(2) * tanh(agreement) * abs2(sech(agreement))
+deriv2(loss::SigmoidLoss, agreement::T) where {T<:Number} = convert(T,2) * tanh(agreement) * abs2(sech(agreement))

isunivfishercons(::SigmoidLoss) = true
isdifferentiable(::SigmoidLoss) = true
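deriv2 above is just the calculus identity d/dx[-sech(x)^2] = 2·tanh(x)·sech(x)^2 applied to deriv. A standalone finite-difference check of that identity (plain Base Julia, no package needed):

```julia
f(x)  = -abs2(sech(x))                # deriv of the sigmoid loss
f2(x) = 2 * tanh(x) * abs2(sech(x))   # deriv2 claimed above

x, h = 0.3, 1e-6
@assert isapprox((f(x + h) - f(x - h)) / (2h), f2(x); atol = 1e-6)
```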
7 changes: 3 additions & 4 deletions test/runtests.jl
@@ -1,7 +1,6 @@
module LossFunctionsTests
-using LearnBase, LossFunctions, DualNumbers, Statistics, Random, SparseArrays, Test
-using LossFunctions
-
+using LearnBase, LossFunctions, DualNumbers
+using Statistics, Random, SparseArrays, Test

tests = [
"tst_loss.jl",
@@ -15,7 +14,7 @@ perf = [

# for deterministic testing

-srand(1234)
+Random.seed!(1234)

distance_losses = [
L2DistLoss(), LPDistLoss(2.0), L1DistLoss(), LPDistLoss(1.0),
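srand moved into the Random stdlib and was renamed Random.seed! around the Julia 0.7/1.0 transition, which is why the test setup now seeds through the stdlib. Reseeding reproduces the same random stream, a minimal sketch:

```julia
using Random

Random.seed!(1234)
a = rand(3)
Random.seed!(1234)
b = rand(3)
@assert a == b    # deterministic testing, as in runtests.jl
```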