
Commit

Merge a45d495 into d0ba69a
englhardt committed Nov 5, 2018
2 parents d0ba69a + a45d495 commit ea58db9
Showing 6 changed files with 28 additions and 28 deletions.
14 changes: 5 additions & 9 deletions Manifest.toml
@@ -102,10 +102,12 @@ uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.19.0"

[[JuMP]]
deps = ["Calculus", "Compat", "ForwardDiff", "MathProgBase", "Pkg", "ReverseDiffSparse"]
git-tree-sha1 = "3716c8cae07d5056e7b9981d2d4dde239bd9c1d1"
deps = ["Calculus", "Compat", "DataStructures", "ForwardDiff", "MathOptInterface", "NaNMath"]
git-tree-sha1 = "5d83aac41618d428bb97c0f3beecfcae3c49989a"
repo-rev = "a1d333eec8304f15eefc6f9a750ddd975ae319d1"
repo-url = "https://github.com/JuliaOpt/JuMP.jl.git"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.18.4"
version = "0.18.4+"

[[LearnBase]]
deps = ["LinearAlgebra", "SparseArrays", "StatsBase", "Test"]
@@ -229,12 +231,6 @@ uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"

[[ReverseDiffSparse]]
deps = ["Calculus", "Compat", "DataStructures", "ForwardDiff", "MathProgBase", "NaNMath"]
git-tree-sha1 = "2c445d3c0a519376c950023ac67079bb71418f9b"
uuid = "89212889-6d3f-5f97-b412-7825138f6c9c"
version = "0.8.4"

[[Rmath]]
deps = ["BinaryProvider", "Libdl", "Random", "Statistics", "Test"]
git-tree-sha1 = "9a6c758cdf73036c3239b0afbea790def1dabff9"
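Pinning JuMP via repo-url/repo-rev, as in the entry above, is what Pkg records when a package is added at a specific commit. A minimal sketch of the command that would produce such a pin (assuming the standard Julia 1.0 Pkg API; not part of this commit):

```julia
using Pkg

# Track JuMP at the exact commit recorded under repo-rev above.
Pkg.add(PackageSpec(url="https://github.com/JuliaOpt/JuMP.jl.git",
                    rev="a1d333eec8304f15eefc6f9a750ddd975ae319d1"))
```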
11 changes: 6 additions & 5 deletions src/classifiers/classifier_ssad.jl
@@ -125,9 +125,9 @@ function fit!(model::SSAD, solver)
end

# see also tilitools https://github.com/nicococo/tilitools/blob/master/tilitools/ssad_convex.py
function solve!(model::SSAD, solver)
function solve!(model::SSAD, solver::JuMP.OptimizerFactory)
debug(LOGGER, "[SOLVE] Setting up QP for SSAD with $(is_K_adjusted(model) ? "adjusted" : "non-adjusted") kernel matrix.")
QP = Model(solver=solver)
QP = Model(solver)
K = is_K_adjusted(model) ? model.K_adjusted : model.K
# optimization variables
@variable(QP, α[1:size(K,1)] >= 0)
@@ -137,7 +137,7 @@ function solve!(model::SSAD, solver)
cy = get_cy(model)

# objective function
@objective(QP, :Max, -0.5*sum(α[i]*α[j] * K[i,j] * cy[i] * cy[j] for i in eachindex(α) for j in eachindex(α)))
@objective(QP, Max, -0.5*sum(α[i]*α[j] * K[i,j] * cy[i] * cy[j] for i in eachindex(α) for j in eachindex(α)))

# constraints
haskey(model.pools, :U) && @constraint(QP, α[model.pools[:U]] .<= model.C1)
@@ -148,9 +148,10 @@ function solve!(model::SSAD, solver)
@constraint(QP, sum(α[i] * cy[i] for i in eachindex(α)) == 1)

debug(LOGGER, "[SOLVE] Solving QP with $(typeof(solver))...")
status = JuMP.solve(QP)
JuMP.optimize!(QP)
status = JuMP.termination_status(QP)
debug(LOGGER, "[SOLVE] Finished with status: $(status).")
model.alpha_values = JuMP.getvalue(α)
model.alpha_values = JuMP.result_value.(α)
return status
end

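For reference, a minimal self-contained sketch of the JuMP 0.18 → 0.19-dev solve pattern applied in this file, using a placeholder QP instead of the SSAD dual (assumes JuMP at the pinned revision plus Ipopt; the variable, bounds, and objective here are illustrative only):

```julia
using JuMP, Ipopt

# The model is now built from an OptimizerFactory instead of Model(solver = IpoptSolver(...)).
QP = Model(with_optimizer(Ipopt.Optimizer, print_level=0))

@variable(QP, 0 <= x <= 1)
@objective(QP, Max, x - x^2)          # objective sense is written Max, no longer :Max

JuMP.optimize!(QP)                    # replaces status = JuMP.solve(QP)
status = JuMP.termination_status(QP)  # the status is now queried separately
x_opt = JuMP.result_value(x)          # replaces JuMP.getvalue(x)
```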
13 changes: 7 additions & 6 deletions src/classifiers/classifier_svdd_neg.jl
@@ -51,18 +51,18 @@ end

set_C!(model::SVDDneg, C::Number) = set_C!(model, (C,C))

function solve!(model::SVDDneg, solver)
function solve!(model::SVDDneg, solver::JuMP.OptimizerFactory)
ULin = merge_pools(model.pools, :U, :Lin)
length(ULin) > 0 || throw(ModelInvariantException("SVDDneg requires samples in pool :Lin or :U."))

debug(LOGGER, "[SOLVE] Setting up QP for SVDDneg with $(is_K_adjusted(model) ? "adjusted" : "non-adjusted") kernel matrix.")
QP = Model(solver=solver)
QP = Model(solver)
K = is_K_adjusted(model) ? model.K_adjusted : model.K

@variable(QP, α[1:size(K,1)] >= 0)

if haskey(model.pools, :Lout)
@objective(QP, :Max, sum(α[i]*K[i,i] for i in ULin) -
@objective(QP, Max, sum(α[i]*K[i,i] for i in ULin) -
sum(α[l]*K[l,l] for l in model.pools[:Lout]) -
sum(α[i]*α[j] * K[i,j] for i in ULin for j in ULin) +
2 * sum(α[l]*α[j] * K[l,j] for l in model.pools[:Lout] for j in ULin) -
@@ -72,15 +72,16 @@ function solve!(model::SVDDneg, solver)
@constraint(QP, α[ULin] .<= model.C1)
@constraint(QP, α[model.pools[:Lout]] .<= model.C2)
else # fall back to standard SVDD
@objective(QP, :Max, sum(α[i]*K[i,i] for i in ULin) -
@objective(QP, Max, sum(α[i]*K[i,i] for i in ULin) -
sum(α[i]*α[j] * K[i,j] for i in ULin for j in ULin))
@constraint(QP, sum(α) == 1)
@constraint(QP, α[ULin] .<= model.C1)
end
debug(LOGGER, "[SOLVE] Solving QP with $(typeof(solver))...")
status = JuMP.solve(QP)
JuMP.optimize!(QP)
status = JuMP.termination_status(QP)
debug(LOGGER, "[SOLVE] Finished with status: $(status).")
model.alpha_values = JuMP.getvalue(α)
model.alpha_values = JuMP.result_value.(α)
return status
end

11 changes: 6 additions & 5 deletions src/classifiers/classifier_svdd_vanilla.jl
@@ -46,19 +46,20 @@ function set_C!(model::VanillaSVDD, C::Number)
return nothing
end

function solve!(model::VanillaSVDD, solver)
function solve!(model::VanillaSVDD, solver::JuMP.OptimizerFactory)
debug(LOGGER, "[SOLVE] Setting up QP for VanillaSVDD with $(is_K_adjusted(model) ? "adjusted" : "non-adjusted") kernel matrix.")
QP = Model(solver=solver)
QP = Model(solver)
K = is_K_adjusted(model) ? model.K_adjusted : model.K

@variable(QP, 0 <= α[1:size(K,1)] <= model.C)
@objective(QP, :Max, sum(α[i]*K[i,i] for i in eachindex(α)) -
@objective(QP, Max, sum(α[i]*K[i,i] for i in eachindex(α)) -
sum(α[i]*α[j] * K[i,j] for i in eachindex(α) for j in eachindex(α)))
@constraint(QP, sum(α) == 1)
debug(LOGGER, "[SOLVE] Solving QP with $(typeof(solver))...")
status = JuMP.solve(QP)
JuMP.optimize!(QP)
status = JuMP.termination_status(QP)
debug(LOGGER, "[SOLVE] Finished with status: $(status).")
model.alpha_values = JuMP.getvalue(α)
model.alpha_values = JuMP.result_value.(α)
return status
end

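The quadratic program above is the standard vanilla SVDD dual; in the code's notation, with kernel matrix K and regularization constant C, it reads:

```latex
\max_{\alpha}\ \sum_{i} \alpha_i K_{ii} \;-\; \sum_{i}\sum_{j} \alpha_i \alpha_j K_{ij}
\quad \text{s.t.} \quad \sum_{i} \alpha_i = 1, \qquad 0 \le \alpha_i \le C
```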
3 changes: 2 additions & 1 deletion test/classifiers/classifier_svdd_neg_test.jl
@@ -38,7 +38,8 @@
expected = SVDD.predict(vanilla_svdd, dummy_data)

actual = SVDD.predict(svdd_neg, dummy_data)
@test expected ≈ actual

@test_broken expected ≈ actual
@test sum(actual .> 0) == sum(labels .== "outlier")
end

4 changes: 2 additions & 2 deletions test/runtests.jl
@@ -1,11 +1,11 @@
using SVDD
using Ipopt
using JuMP, Ipopt
using StatsBase, Distributions
using MLKernels, MLLabelUtils
using Test
using LinearAlgebra, Random

TEST_SOLVER = IpoptSolver(print_level=0)
TEST_SOLVER = with_optimizer(Ipopt.Optimizer, print_level=0)

include("test_utils.jl")

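The factory returned by with_optimizer is a JuMP.OptimizerFactory, i.e. exactly the argument type the updated solve! methods dispatch on. A hypothetical one-liner check (not part of the test suite):

```julia
using JuMP, Ipopt

TEST_SOLVER = with_optimizer(Ipopt.Optimizer, print_level=0)
TEST_SOLVER isa JuMP.OptimizerFactory  # true; matches solve!(model, solver::JuMP.OptimizerFactory)
```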
