Merged
Changes from all commits · 43 commits
a649f8f · so it starts (take two) · VarLad, Oct 22, 2024
0ef3d93 · and it continues... (some refactor) · VarLad, Oct 25, 2024
075687e · some rewrite work · VarLad, Nov 14, 2024
c0bead0 · moar stuff · VarLad, Dec 4, 2024
ace22b8 · make tests pass? · VarLad, Dec 4, 2024
8739f7a · extensions, explore_learn, generate viable weights · VarLad, Dec 6, 2024
8bddccc · make things work... · VarLad, Dec 8, 2024
71f72a7 · Fix Aqua.jl tests · Azzaare, Dec 23, 2024
07d884d · Fix explicitImports.jl tests · Azzaare, Dec 23, 2024
f53586e · Reactivate Jet.jl. Some errors need to be fixed · Azzaare, Dec 23, 2024
031af06 · Change some names · VarLad, Dec 25, 2024
b2bcbf9 · Fix JET error · VarLad, Jan 8, 2025
a5d1472 · better tests · VarLad, Mar 7, 2025
3ab71ec · vals · VarLad, Mar 10, 2025
1d93892 · some fix to evaluation · VarLad, Mar 10, 2025
881f8a4 · better tests · VarLad, Mar 10, 2025
6775b0a · some improvements (regularization doesn't seem to work) · VarLad, Mar 12, 2025
ad4107b · better regularization, simple filter layer · VarLad, Mar 13, 2025
7cb27e4 · Add regularization back · VarLad, Mar 14, 2025
d10fcae · Clean up part 1 · Azzaare, Mar 15, 2025
1d173f3 · Clean up GA and Ext · Azzaare, Mar 15, 2025
e68a95d · Initial (untested) new LocalSearch optimizer, new operations, changes… · VarLad, Mar 17, 2025
00733af · Updates for CBLS Opt · Azzaare, Mar 17, 2025
180c506 · fix GA test · VarLad, Mar 17, 2025
52aef67 · Fixing CBLS · Azzaare, Mar 17, 2025
0cfc081 · fix aggregation · VarLad, Mar 17, 2025
96e5b29 · more fix aggregation · VarLad, Mar 17, 2025
7343677 · more more fix aggregation · VarLad, Mar 17, 2025
fcc11a9 · add codegen plus some other stuff · VarLad, Mar 23, 2025
e7a170a · export compose · VarLad, Mar 23, 2025
269a9d1 · simplification · VarLad, Mar 24, 2025
7e22d44 · small additions · VarLad, Mar 24, 2025
ccd5137 · type stability in codegen · VarLad, Mar 24, 2025
a4858da · Some changes · VarLad, Mar 31, 2025
d6a36b2 · add JuMP solver · VarLad, Apr 2, 2025
b5eabb8 · Minor changes to config · VarLad, May 27, 2025
5f4947f · Update compat · Azzaare, Jun 4, 2025
a46bf29 · Mark CBLS tests as broken for now · Azzaare, Jun 4, 2025
09ebee1 · Temporary removal of CBLS tests until LocalSearchSolvers updates its … · Azzaare, Jun 4, 2025
7708c03 · Compat and format · Azzaare, Jun 4, 2025
9cbb177 · crude fix for latest version of ConstraintDomains · VarLad, Jun 4, 2025
7e172fc · Merge pull request #85 from JuliaConstraints/for_chetan2 · Azzaare, Jun 4, 2025
045b1b0 · Merge pull request #84 from JuliaConstraints/refactor_new · Azzaare, Jun 4, 2025
1 change: 1 addition & 0 deletions .JuliaFormatter.toml
@@ -0,0 +1 @@
style = "sciml"
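This config selects JuliaFormatter's built-in SciML style for the whole repository. A minimal sketch of how it is typically applied, assuming JuliaFormatter is installed in the active environment:

```julia
using JuliaFormatter

# Format every Julia file under the project root; the `style = "sciml"` entry
# in .JuliaFormatter.toml is picked up automatically for files below it.
format(".")
```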
46 changes: 41 additions & 5 deletions Project.toml
@@ -1,25 +1,46 @@
name = "CompositionalNetworks"
uuid = "4b67e4b5-442d-4ef5-b760-3f5df3a57537"
authors = ["Jean-François Baffier"]
version = "0.5.9"
version = "0.6.0"

[deps]
ConstraintCommons = "e37357d9-0691-492f-a822-e5ea6a920954"
ConstraintDomains = "5800fd60-8556-4464-8d61-84ebf7a0bedb"
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
ExproniconLite = "55351af7-c7e9-48d6-89ff-24e801d99491"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
TestItems = "1c621080-faea-4a02-84b6-bbd5e436b8fe"
Unrolled = "9602ed7d-8fef-5bc8-8597-8f21381861e8"

[weakdeps]
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
# LocalSearchSolvers = "2b10edaa-728d-4283-ac71-07e312d6ccf3"
# JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
# Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
# Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
# Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b"

[extensions]
GeneticExt = "Evolutionary"
# LocalSearchSolversExt = "LocalSearchSolvers"
# JuMPExt = ["JuMP", "Juniper", "Ipopt", "Gurobi"]

[compat]
ConstraintCommons = "0.2"
ConstraintDomains = "0.3"
LocalSearchSolvers = "0.4"
Evolutionary = "0.11"
# JuMP = "1"
# Juniper = "0.9"
# Ipopt = "1"
# Gurobi = "1.7"
ConstraintCommons = "0.2, 0.3"
ConstraintDomains = "0.3, 0.4"
Dictionaries = "0.4"
Distances = "0.10"
JuliaFormatter = "1"
ExproniconLite = "0.10.13"
JuliaFormatter = "1, 2"
OrderedCollections = "1"
Random = "1"
TestItems = "1"
@@ -29,6 +50,11 @@ julia = "1.10"
[extras]
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
LocalSearchSolvers = "2b10edaa-728d-4283-ac71-07e312d6ccf3"
# JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
# Juniper = "2ddba703-00a4-53a7-87a5-e8b9971dde84"
# Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
# Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b"
ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
Memoization = "6fafb56a-5788-4b4e-91ca-c0cea6611c73"
@@ -37,4 +63,14 @@ TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a"
ThreadPools = "b189fb0b-2eb5-4ed4-bc0c-d34c51242431"

[targets]
test = ["Aqua", "ExplicitImports", "JET", "Evolutionary", "Memoization", "Test", "TestItemRunner", "ThreadPools"]
test = [
"Aqua",
"ExplicitImports",
"JET",
"Evolutionary",
# "LocalSearchSolvers",
"Memoization",
"Test",
"TestItemRunner",
"ThreadPools",
]
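The new [weakdeps] and [extensions] sections turn the genetic-algorithm backend into a package extension: ext/GeneticExt.jl is only compiled and loaded once Evolutionary is present alongside CompositionalNetworks. A minimal usage sketch of that trigger, assuming both packages are installed (the keyword values are illustrative):

```julia
using CompositionalNetworks   # GeneticExt is not loaded yet
using Evolutionary            # loading the weak dependency activates ext/GeneticExt.jl

# The keyword constructor defined in the extension becomes available:
opt = CompositionalNetworks.GeneticOptimizer(local_iter = 200, pop_size = 64)
```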
133 changes: 133 additions & 0 deletions ext/GeneticExt.jl
@@ -0,0 +1,133 @@
module GeneticExt

import CompositionalNetworks:
CompositionalNetworks, AbstractICN, Configurations, manhattan,
hamming
import CompositionalNetworks: GeneticOptimizer, apply!, weights_bias, regularization
import CompositionalNetworks: evaluate, solutions
import Evolutionary: Evolutionary, tournament, SPX, flip, GA

function CompositionalNetworks.GeneticOptimizer(;
global_iter = Threads.nthreads(),
# local_iter=64,
local_iter = 400,
memoize = false,
#pop_size=64,
pop_size = 100,
sampler = nothing
)
return GeneticOptimizer(global_iter, local_iter, memoize, pop_size, sampler)
end

function generate_population(icn, pop_size; vect = [])
population = Vector{BitVector}()
if isempty(vect)
foreach(_ -> push!(population, falses(length(icn.weights))), 1:pop_size)
else
foreach(_ -> push!(population, vect), 1:pop_size)
end
return population
end

function CompositionalNetworks.optimize!(
icn::T,
configurations::Configurations,
# dom_size,
metric_function::Union{Function, Vector{Function}},
optimizer_config::GeneticOptimizer;
samples = nothing,
memoize = false,
parameters...
) where {T <: AbstractICN}

# @info icn.weights

# inplace = zeros(dom_size, 18)
solution_iter = solutions(configurations)
non_solutions = solutions(configurations; non_solutions = true)
solution_vector = [i.x for i in solution_iter]

function fitness(w)
weights_validity = apply!(icn, w)

a = if metric_function isa Function
metric_function(
icn,
configurations,
solution_vector;
weights_validity = weights_validity,
parameters...
)
else
minimum(
met -> met(
icn,
configurations,
solution_vector;
weights_validity = weights_validity,
parameters...
),
metric_function
)
end

b = weights_bias(w)
c = regularization(icn)

function new_regularization(icn::AbstractICN)
start = 1
count = 0
total = 0
for (i, layer) in enumerate(icn.layers)
if !layer.mutex
ran = start:(start + icn.weightlen[i] - 1)
op = findall(icn.weights[ran])
max_op = ran .- (start - 1)
total += (sum(op) / sum(max_op))
count += 1
end
start += icn.weightlen[i]
end
return total / count
end

d = sum(findall(icn.weights)) /
(length(icn.weights) * (length(icn.weights) + 1) / 2)

e = new_regularization(icn)

# @info "Lot of things" a b c d e
#=
println("""
sum: $a
weights bias: $b
regularization: $c
new reg: $e
thread: $(Threads.threadid())
""") =#

return a + b + c
end

_icn_ga = GA(;
populationSize = optimizer_config.pop_size,
crossoverRate = 0.8,
epsilon = 0.05,
selection = tournament(4),
crossover = SPX,
mutation = flip,
mutationRate = 1.0
)

pop = generate_population(icn, optimizer_config.pop_size)
r = Evolutionary.optimize(
fitness,
pop,
_icn_ga,
Evolutionary.Options(; iterations = optimizer_config.local_iter)
)
validity = apply!(icn, Evolutionary.minimizer(r))
return icn => validity
end

end
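For readers unfamiliar with Evolutionary.jl, here is a self-contained toy run using the same GA settings as the extension above; the fitness function, chromosome length, and iteration count are made up for illustration, only the GA configuration mirrors the code:

```julia
using Evolutionary

# Toy fitness over a BitVector: prefer exactly four active weights.
toy_fitness(w) = abs(count(w) - 4)

# Same GA configuration as in GeneticExt.jl.
ga = GA(; populationSize = 100, crossoverRate = 0.8, epsilon = 0.05,
    selection = tournament(4), crossover = SPX, mutation = flip, mutationRate = 1.0)

# Start, as generate_population does, from all-false weight vectors.
population = [falses(16) for _ in 1:100]

result = Evolutionary.optimize(toy_fitness, population, ga,
    Evolutionary.Options(; iterations = 200))
best = Evolutionary.minimizer(result)   # BitVector with the lowest fitness found
```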
109 changes: 109 additions & 0 deletions ext/JuMPExt.jl
@@ -0,0 +1,109 @@
module JuMPExt

using JuMP
using Juniper
using Ipopt
using Gurobi

# Original imports
import CompositionalNetworks: CompositionalNetworks, AbstractICN, Configurations
import CompositionalNetworks: JuMPOptimizer, apply!, weights_bias, regularization
import CompositionalNetworks: evaluate, solutions

function CompositionalNetworks.optimize!(
icn::T,
configurations::Configurations,
metric_function::Union{Function, Vector{Function}},
optimizer_config::JuMPOptimizer;
parameters...
) where {T <: AbstractICN}
# Create model
m = Model()

# Set up MINLP solver
nl_solver = optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0)
mip_solver = optimizer_with_attributes(Gurobi.Optimizer, "OutputFlag" => 0)

set_optimizer(
m,
optimizer_with_attributes(
Juniper.Optimizer,
"nl_solver" => nl_solver,
"mip_solver" => mip_solver,
"log_levels" => []
)
)

n = length(icn.weights)

# All variables are binary
@variable(m, w[1:n], Bin)

# Add constraints
start = 1
for (i, layer) in enumerate(icn.layers)
stop = start + icn.weightlen[i] - 1
idx_range = start:stop

if layer.mutex
# Mutually exclusive constraint - at most one variable can be true
# Equivalent to: max(0.0, sum(w[idx_range]) - 1) = 0
@constraint(m, sum(w[j] for j in idx_range) <= 1)
else
# No empty layer constraint - at least one variable must be true
# Equivalent to: max(0, 1 - sum(w[idx_range])) = 0
@constraint(m, sum(w[j] for j in idx_range) >= 1)
end

start = stop + 1
end

# Define fitness function - keeping the original structure
function fitness(w_values)
# Convert JuMP variables to BitVector
w_bits = BitVector([value(w_values[i]) > 0.5 for i in 1:length(w_values)])

weights_validity = apply!(icn, w_bits)

s = if metric_function isa Function
metric_function(
icn,
configurations,
solution_vector;
weights_validity = weights_validity,
parameters...
)
else
minimum(
met -> met(
icn,
configurations,
solution_vector;
weights_validity = weights_validity,
parameters...
),
metric_function
)
end
return s + weights_bias(w_bits) + regularization(icn)
end

# Define objective using the fitness function
@NLobjective(m, Min, fitness(w))

# Solve model
optimize!(m)

# Return solution
if termination_status(m) in [MOI.OPTIMAL, MOI.LOCALLY_SOLVED]
w_sol = value.(w) .> 0.5 # Convert to BitVector
weights_validity = apply!(icn, BitVector(w_sol))
return icn => weights_validity
else
# No solution found, generate new valid weights
CompositionalNetworks.generate_new_valid_weights!(icn)
return icn => true
end
end

end
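The JuMP extension stays commented out in Project.toml for now, so the file above is not yet exercised. For reference, a self-contained sketch of the per-layer constraint pattern it builds, with made-up layer sizes and mutex flags (JuMP only, no solver attached):

```julia
using JuMP

# Mirror the constraint structure from ext/JuMPExt.jl: a mutex layer keeps at
# most one weight active, any other layer must keep at least one.
function build_layer_model(weightlen::Vector{Int}, mutex::Vector{Bool})
    m = Model()
    @variable(m, w[1:sum(weightlen)], Bin)
    start = 1
    for (i, len) in enumerate(weightlen)
        idx = start:(start + len - 1)
        if mutex[i]
            @constraint(m, sum(w[j] for j in idx) <= 1)  # at most one operation selected
        else
            @constraint(m, sum(w[j] for j in idx) >= 1)  # layer must not be empty
        end
        start += len
    end
    return m, w
end

m, w = build_layer_model([4, 4, 4], [false, true, false])
```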