/
LossFunctions.jl
109 lines (92 loc) · 2.15 KB
/
LossFunctions.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
"""
Main module for `LossFunctions.jl` -- a collection of loss functions
for supervised learning (margin-based, distance-based, and others),
together with query functions for their mathematical properties
(convexity, differentiability, Lipschitz continuity, ...).
"""
module LossFunctions

# `*` is extended (e.g. for scaled losses); use the explicit colon form.
import Base: *
using Base.Cartesian
# InteractiveUtils provides `subtypes`, used below to generate functor methods.
using Markdown, SparseArrays, InteractiveUtils
using RecipesBase
using LearnBase
import LearnBase:
    value, value!,
    deriv, deriv2, deriv!,
    value_deriv,
    scaled,
    isminimizable,
    isdifferentiable,
    istwicedifferentiable,
    isconvex,
    isstrictlyconvex,
    isstronglyconvex,
    isnemitski,
    isunivfishercons,
    isfishercons,
    islipschitzcont,
    islocallylipschitzcont,
    islipschitzcont_deriv, # maybe overkill
    isclipable,
    ismarginbased,
    isclasscalibrated,
    isdistancebased,
    issymmetric

export
    value,
    value!,
    deriv2!,
    value_fun,
    deriv_fun,
    deriv2_fun,
    value_deriv_fun,
    ZeroOneLoss,
    LogitMarginLoss,
    PerceptronLoss,
    HingeLoss,
    L1HingeLoss,
    L2HingeLoss,
    SmoothedL1HingeLoss,
    ModifiedHuberLoss,
    L2MarginLoss,
    ExpLoss,
    SigmoidLoss,
    DWDMarginLoss,
    LPDistLoss,
    L1DistLoss,
    L2DistLoss,
    PeriodicLoss,
    HuberLoss,
    EpsilonInsLoss,
    L1EpsilonInsLoss,
    L2EpsilonInsLoss,
    LogitDistLoss,
    QuantileLoss,
    PinballLoss,
    MisclassLoss,
    PoissonLoss,
    LogitProbLoss,
    CrossEntropyLoss,
    OrdinalMarginLoss,
    OrdinalHingeLoss,
    weightedloss,
    AggMode

include("common.jl")
include("aggregatemode.jl")
include("supervised/supervised.jl")
include("supervised/sparse.jl")
include("supervised/distance.jl")
include("supervised/margin.jl")
include("supervised/scaledloss.jl")
include("supervised/weightedbinary.jl")
include("supervised/other.jl")
include("supervised/ordinal.jl")
include("supervised/io.jl")
include("deprecated.jl")

# Make loss objects callable: `loss(args...)` forwards to `value(loss, args...)`.
# The wrapper losses are parametric, so they get explicit functor methods:
(loss::ScaledSupervisedLoss)(args...) = value(loss, args...)
(loss::WeightedBinaryLoss)(args...) = value(loss, args...)

# Concrete direct subtypes of SupervisedLoss each need their own method,
# since functor syntax cannot be defined on an abstract type's subtypes at once.
for T in filter(isconcretetype, subtypes(SupervisedLoss))
    @eval (loss::$T)(args...) = value(loss, args...)
end

# Same for the subtypes of the two main abstract families.
for T in union(subtypes(DistanceLoss), subtypes(MarginLoss))
    @eval (loss::$T)(args...) = value(loss, args...)
end

end # module