Skip to content

Commit

Permalink
Only use independent variables for hopsy
Browse files Browse the repository at this point in the history
  • Loading branch information
schmoelder committed Mar 11, 2024
1 parent 95835ff commit e8fad4b
Show file tree
Hide file tree
Showing 2 changed files with 106 additions and 72 deletions.
95 changes: 72 additions & 23 deletions CADETProcess/optimization/optimizationProblem.py
Expand Up @@ -2426,6 +2426,19 @@ def Aeq_transformed(self):

return Aeq_t

@property
def Aeq_independent(self):
    """np.ndarray: LHS matrix of linear equality constraints for independent variables.

    Restricts the columns of ``Aeq`` to the independent (non-dependent)
    optimization variables via ``independent_variable_indices``.

    See Also
    --------
    Aeq
    Aeq_transformed
    Aeq_independent_transformed
    """
    return self.Aeq[:, self.independent_variable_indices]

@property
def Aeq_independent_transformed(self):
"""np.ndarray: LHS of lin ineqs for indep variables in transformed space.
Expand Down Expand Up @@ -2653,18 +2666,45 @@ def prune_cache(self):
"""Prune cache with (intermediate) results."""
self.cache.prune()

def create_hopsy_problem(self, simplify=False, use_custom_model=False):
"""creates a hopsy problem from an optimization problem"""
def create_hopsy_problem(
self,
include_dependent_variables=True,
simplify=False,
use_custom_model=False
):
"""Creates a hopsy problem from the optimization problem.
Parameters
----------
include_dependent_variables : bool, optional
    If True, include the dependent variables in the hopsy problem;
    if False, only the independent variables are used. The default is True.
simplify : bool, optional
If True, simplify the hopsy problem. The default is False.
use_custom_model : bool, optional
If True, use custom model to improve sampling of log normalized parameters.
The default is False.
Returns
-------
problem
hopsy.Problem
"""
class CustomModel:
    """Minimal model object passed to ``hopsy.Problem``.

    Provides a negative log-likelihood used to improve sampling of
    log-normalized parameters.
    """

    def __init__(self, log_space_indices: list):
        # Indices of the variables that are treated in log space.
        self.log_space_indices = log_space_indices

    def compute_negative_log_likelihood(self, x):
        """Return the sum of ``log(x_i)`` over the log-space indices."""
        selected = x[self.log_space_indices]
        return np.log(selected).sum()

if include_dependent_variables:
variables = self.variables
else:
variables = self.independent_variables

log_space_indices = []
for i, var in enumerate(self.variables):

for i, var in enumerate(variables):
if (
isinstance(var._transform, NormLogTransform)
or
Expand All @@ -2684,24 +2724,39 @@ def compute_negative_log_likelihood(self, x):
else:
model = None

if include_dependent_variables:
A = self.A
b = self.b
lower_bounds = self.lower_bounds
upper_bounds = self.upper_bounds
Aeq = self.Aeq
beq = self.beq
else:
A = self.A_independent
b = self.b
lower_bounds = self.lower_bounds_independent
upper_bounds = self.upper_bounds_independent
Aeq = self.Aeq_independent
beq = self.beq

problem = hopsy.Problem(
self.A,
self.b,
A,
b,
model,
)

problem = hopsy.add_box_constraints(
problem,
self.lower_bounds,
self.upper_bounds,
lower_bounds,
upper_bounds,
simplify=simplify,
)

if self.n_linear_equality_constraints > 0:
problem = hopsy.add_equality_constraints(
problem,
self.Aeq,
self.beq
Aeq,
beq
)

return problem
Expand All @@ -2723,7 +2778,7 @@ def get_chebyshev_center(self, include_dependent_variables=True):
Chebyshev center.
"""
problem = self.create_hopsy_problem(
simplify=False, use_custom_model=True
include_dependent_variables=False, simplify=False, use_custom_model=True
)
# !!! Additional checks in place to handle PolyRound.round()
# removing "small" dimensions.
Expand All @@ -2742,16 +2797,14 @@ def get_chebyshev_center(self, include_dependent_variables=True):
problem_rounded, original_space=True
)[:, 0]

if np.all(np.greater(chebyshev_rounded, self.lower_bounds)):
if np.all(np.greater(chebyshev_rounded, self.lower_bounds_independent)):
problem = problem_rounded
chebyshev = chebyshev_rounded
else:
chebyshev = chebyshev_orig
else:
chebyshev = chebyshev_orig

chebyshev = self.get_independent_values(chebyshev)

if include_dependent_variables:
chebyshev = self.get_dependent_values(chebyshev)

Expand Down Expand Up @@ -2796,11 +2849,13 @@ def create_initial_values(
warnings.simplefilter("ignore")

problem = self.create_hopsy_problem(
simplify=False, use_custom_model=True
include_dependent_variables=False,
simplify=False,
use_custom_model=True,
)

chebychev_center = self.get_chebyshev_center(
include_dependent_variables=True
include_dependent_variables=False
)

if seed is None:
Expand All @@ -2818,14 +2873,7 @@ def create_initial_values(
acceptance_rate, states = hopsy.sample(
mc, rng_hopsy, n_samples=burn_in, thinning=2
)
values = states[0, ...]

# Since hopsy does not know about dependencies, they are recomputed for consistency.
independent_indices = [
i for i, variable in enumerate(self.variables)
if variable in self.independent_variables
]
independent_values = values[:, independent_indices]
independent_values = states[0, ...]

values = []
counter = 0
Expand Down Expand Up @@ -2970,6 +3018,7 @@ def check_config(self, ignore_linear_constraints=False):

if self.n_objectives == 0:
flag = False

if self.n_linear_constraints + self.n_linear_equality_constraints > 0 \
and not ignore_linear_constraints:
if not self.check_linear_constraints_transforms():
Expand Down
83 changes: 34 additions & 49 deletions tests/test_optimization_problem.py
Expand Up @@ -847,30 +847,26 @@ def transform():
self.assertEqual(variables_expected, variables)

def test_initial_values_without_dependencies(self):
x0_chebyshev_expected = [0.75, 0.5, 0.5]
x0_chebyshev_expected = [2/3, 0.5, 1/3]
x0_chebyshev = self.optimization_problem.get_chebyshev_center(
include_dependent_variables=False
)
np.testing.assert_almost_equal(x0_chebyshev, x0_chebyshev_expected)

variables_expected = [0.75, 0.5 , 0.5 , 0.5]
variables_expected = [2/3, 0.5, 0.5, 1/3]
variables = self.optimization_problem.get_dependent_values(x0_chebyshev)
np.testing.assert_almost_equal(variables, variables_expected)

self.assertTrue(
self.optimization_problem.check_linear_constraints(
x0_chebyshev, get_dependent_values=True
)
)
self.assertTrue(
self.optimization_problem.check_linear_constraints(variables)
)

x0_seed_1_expected = [[0.7311044, 0.1727515, 0.1822629]]
x0_seed_1_expected = [[0.90164487, 0.27971297, 0.70490538]]
x0_seed_1 = self.optimization_problem.create_initial_values(
1, seed=1, include_dependent_variables=False
)
np.testing.assert_almost_equal(x0_seed_1, x0_seed_1_expected)
self.assertTrue(
self.optimization_problem.check_linear_constraints(
x0_seed_1[0], get_dependent_values=True
)
)

x0_seed_1_random = self.optimization_problem.create_initial_values(
1, include_dependent_variables=False
Expand All @@ -883,16 +879,16 @@ def test_initial_values_without_dependencies(self):
np.testing.assert_almost_equal(x0_seed_1_random, x0_chebyshev_expected)

x0_seed_10_expected = [
[0.7311043824888657, 0.1727515432673712, 0.18226293643057073],
[0.9836918383919191, 0.8152389217047241, 0.8560016844195478],
[0.7358144798470049, 0.2574714423019172, 0.49387609464567295],
[0.34919171897183954, 0.05751800197656948, 0.3237260675631758],
[0.9265061673265441, 0.4857572549618687, 0.8149444448089398],
[0.9065669851023331, 0.1513817591204391, 0.7710992332649812],
[0.8864554240066591, 0.4771068979697068, 0.5603893963194555],
[0.6845940550232432, 0.2843172686185149, 0.6792904559788712],
[0.923735889273789, 0.6890814170651027, 0.7366940211809302],
[0.8359314486227345, 0.39493879515319996, 0.8128182754300088]
[0.90164487, 0.27971297, 0.70490538],
[0.78125338, 0.17275154, 0.54650281],
[0.97623563, 0.19106333, 0.79016462],
[0.12826546, 0.03476412, 0.05270397],
[0.89791146, 0.29062957, 0.7429437 ],
[0.8703531 , 0.20575487, 0.68237913],
[0.92572799, 0.01653708, 0.33539715],
[0.96337056, 0.07106034, 0.86232007],
[0.85559046, 0.4824452 , 0.84474955],
[0.8588277 , 0.73874869, 0.80355266]
]
x0_seed_10 = self.optimization_problem.create_initial_values(
10, seed=1, include_dependent_variables=False
Expand All @@ -907,37 +903,26 @@ def test_initial_values_without_dependencies(self):
np.testing.assert_almost_equal(x0_seed_10_random, x0_seed_10_expected)

def test_initial_values(self):
x0_chebyshev_expected = [0.75, 0.5, 0.5, 0.5]
x0_chebyshev_expected = [2/3, 0.5, 0.5, 1/3]
x0_chebyshev = self.optimization_problem.get_chebyshev_center(
include_dependent_variables=True
)

np.testing.assert_almost_equal(x0_chebyshev, x0_chebyshev_expected)

independent_variables_expected = [0.75, 0.5, 0.5]
independent_variables = self.optimization_problem.get_independent_values(
x0_chebyshev
)
independent_variables_expected = [2/3, 0.5, 1/3]
independent_variables = self.optimization_problem.get_independent_values(x0_chebyshev)
np.testing.assert_almost_equal(independent_variables, independent_variables_expected)

self.assertTrue(
self.optimization_problem.check_linear_constraints(
x0_chebyshev, get_dependent_values=False
)
)
self.assertTrue(
self.optimization_problem.check_linear_constraints(x0_chebyshev)
)

x0_seed_1_expected = [[0.7311044, 0.1727515, 0.1727515, 0.1822629]]
x0_seed_1_expected = [[0.9016449, 0.279713 , 0.279713 , 0.7049054]]
x0_seed_1 = self.optimization_problem.create_initial_values(
1, seed=1, include_dependent_variables=True
)
np.testing.assert_almost_equal(x0_seed_1, x0_seed_1_expected)
self.assertTrue(self.optimization_problem.check_linear_constraints(x0_seed_1[0]))

x0_seed_1_random = self.optimization_problem.create_initial_values(
1, include_dependent_variables=True
)
)[0]

with self.assertRaises(AssertionError):
np.testing.assert_almost_equal(x0_seed_1_random, x0_seed_1_expected)
Expand All @@ -946,16 +931,16 @@ def test_initial_values(self):
np.testing.assert_almost_equal(x0_seed_1_random, x0_chebyshev_expected)

x0_seed_10_expected = [
[0.7311043824888657, 0.1727515432673712, 0.1727515432673712, 0.18226293643057073],
[0.9836918383919191, 0.8152389217047241, 0.8152389217047241, 0.8560016844195478],
[0.7358144798470049, 0.2574714423019172, 0.2574714423019172, 0.49387609464567295],
[0.34919171897183954, 0.05751800197656948, 0.05751800197656948, 0.3237260675631758],
[0.9265061673265441, 0.4857572549618687, 0.4857572549618687, 0.8149444448089398],
[0.9065669851023331, 0.1513817591204391, 0.1513817591204391, 0.7710992332649812],
[0.8864554240066591, 0.4771068979697068, 0.4771068979697068, 0.5603893963194555],
[0.6845940550232432, 0.2843172686185149, 0.2843172686185149, 0.6792904559788712],
[0.923735889273789, 0.6890814170651027, 0.6890814170651027, 0.7366940211809302],
[0.8359314486227345, 0.39493879515319996, 0.39493879515319996, 0.8128182754300088]
[0.90164487, 0.27971297, 0.27971297, 0.70490538],
[0.78125338, 0.17275154, 0.17275154, 0.54650281],
[0.97623563, 0.19106333, 0.19106333, 0.79016462],
[0.12826546, 0.03476412, 0.03476412, 0.05270397],
[0.89791146, 0.29062957, 0.29062957, 0.7429437 ],
[0.8703531 , 0.20575487, 0.20575487, 0.68237913],
[0.92572799, 0.01653708, 0.01653708, 0.33539715],
[0.96337056, 0.07106034, 0.07106034, 0.86232007],
[0.85559046, 0.4824452 , 0.4824452 , 0.84474955],
[0.8588277 , 0.73874869, 0.73874869, 0.80355266]
]
x0_seed_10 = self.optimization_problem.create_initial_values(
10, seed=1, include_dependent_variables=True
Expand Down

0 comments on commit e8fad4b

Please sign in to comment.