Commit: Move chebyshev center to own method

schmoelder committed Jan 10, 2024
1 parent 9934309 commit 65b583a
Showing 4 changed files with 122 additions and 110 deletions.
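For reference, the Chebyshev center that the new method exposes is the center of the largest ball inscribed in the feasible polytope {x : Ax <= b}. As background (not code or text from this commit), it can be stated as the linear program

    \begin{aligned}
    \max_{x_c,\; r \ge 0} \quad & r \\
    \text{s.t.} \quad & a_i^\top x_c + r \,\lVert a_i \rVert_2 \le b_i, \qquad i = 1, \dots, m,
    \end{aligned}

where a_i are the rows of A. In the diff below, hopsy.compute_chebyshev_center is used to obtain this point for the polytope assembled by create_hopsy_problem.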
162 changes: 89 additions & 73 deletions CADETProcess/optimization/optimizationProblem.py
@@ -2700,8 +2700,52 @@ def compute_negative_log_likelihood(self, x):

return problem

def get_chebyshev_center(self, include_dependent_variables=True):
"""Compute chebychev center.
Parameters
----------
include_dependent_variables : bool, optional
If True, include dependent variables in population.
Returns
-------
chebyshev : list
Chebyshev center.
"""
problem = self.create_hopsy_problem(
simplify=False, use_custom_model=True
)
# !!! Additional checks in place to handle PolyRound.round()
# removing "small" dimensions.
# Bug reported, Check for future release!
chebyshev_orig = hopsy.compute_chebyshev_center(problem)[:, 0]

try:
problem_rounded = hopsy.round(problem)
except ValueError:
problem_rounded = problem

if problem_rounded.A.shape[1] == problem.A.shape[1]:
chebyshev_rounded = hopsy.compute_chebyshev_center(problem_rounded)[:, 0]

if np.all(np.greater(chebyshev_rounded, self.lower_bounds)):
problem = problem_rounded
chebyshev = chebyshev_rounded
else:
chebyshev = chebyshev_orig
else:
chebyshev = chebyshev_orig

chebyshev = self.get_independent_values(chebyshev)

if include_dependent_variables:
chebyshev = self.get_dependent_values(chebyshev)

return chebyshev

def create_initial_values(
self, n_samples=1, method='random', seed=None, burn_in=100000,
self, n_samples=1, seed=None, burn_in=100000,
include_dependent_variables=True):
"""Create initial value within parameter space.
@@ -2712,9 +2756,6 @@ def create_initial_values(
----------
n_samples : int
Number of initial values to be drawn
method : str, optional
chebyshev: Return center of the minimal-radius ball enclosing the entire set .
random: Any random valid point in the parameter space.
seed : int, optional
Seed to initialize random numbers. Only used if method == 'random'
burn_in : int, optional
@@ -2743,92 +2784,67 @@ def create_initial_values(
problem = self.create_hopsy_problem(
simplify=False, use_custom_model=True
)
# !!! Additional checks in place to handle PolyRound.round()
# removing "small" dimensions.
# Bug reported, Check for future release!
chebyshev_orig = hopsy.compute_chebyshev_center(problem)[:, 0]

try:
problem_rounded = hopsy.round(problem)
except ValueError:
problem_rounded = problem

if problem_rounded.A.shape[1] == problem.A.shape[1]:
chebyshev_rounded = hopsy.compute_chebyshev_center(problem_rounded)[:, 0]

if np.all(np.greater(chebyshev_rounded, self.lower_bounds)):
problem = problem_rounded
chebyshev = chebyshev_rounded
else:
chebyshev = chebyshev_orig
else:
chebyshev = chebyshev_orig
chebyshev_center = self.get_chebyshev_center(
include_dependent_variables=True
)

if n_samples == 1 and method == 'chebyshev':
values = np.array(chebyshev_orig, ndmin=2)
else:
if seed is None:
seed = random.randint(0, 255)
if seed is None:
seed = random.randint(0, 255)

rng = np.random.default_rng(seed)
rng = np.random.default_rng(seed)

mc = hopsy.MarkovChain(
problem,
proposal=hopsy.UniformCoordinateHitAndRunProposal,
starting_point=chebyshev
)
rng_hopsy = hopsy.RandomNumberGenerator(seed=seed)
mc = hopsy.MarkovChain(
problem,
proposal=hopsy.UniformCoordinateHitAndRunProposal,
starting_point=chebyshev_center
)
rng_hopsy = hopsy.RandomNumberGenerator(seed=seed)

acceptance_rate, states = hopsy.sample(
mc, rng_hopsy, n_samples=burn_in, thinning=2
)
values = states[0, ...]
acceptance_rate, states = hopsy.sample(
mc, rng_hopsy, n_samples=burn_in, thinning=2
)
values = states[0, ...]

# Because hopsy doesn't know about dependencies, remove dependencies and recompute them later
# Since hopsy does not know about dependencies, they are recomputed for consistency.
independent_indices = [
i for i, variable in enumerate(self.variables)
if variable in self.independent_variables
]
independent_values = values[:, independent_indices]

# from this point onward, `values` contains dependent variables
if n_samples == 1 and method == 'chebyshev':
values = independent_values
if include_dependent_variables:
values = self.get_dependent_values(values[0])
else:
values = []
counter = 0
while len(values) < n_samples:
if counter > burn_in:
raise CADETProcessError(
"Cannot find individuals that fulfill constraints."
)
values = []
counter = 0
while len(values) < n_samples:
if counter > burn_in:
raise CADETProcessError(
"Cannot find individuals that fulfill constraints."
)

counter += 1
i = rng.integers(0, burn_in)
ind = []
for i_var, var in enumerate(self.independent_variables):
ind.append(
float(np.format_float_positional(
independent_values[i, i_var],
precision=var.precision, fractional=False
))
)
counter += 1
i = rng.integers(0, burn_in)
ind = []
for i_var, var in enumerate(self.independent_variables):
ind.append(
float(np.format_float_positional(
independent_values[i, i_var],
precision=var.precision, fractional=False
))
)

ind = self.get_dependent_values(ind)
ind = self.get_dependent_values(ind)

if not self.check_bounds(ind):
continue
if not self.check_linear_constraints(ind):
continue
if not self.check_linear_equality_constraints(ind):
continue
if not self.check_bounds(ind):
continue
if not self.check_linear_constraints(ind):
continue
if not self.check_linear_equality_constraints(ind):
continue

if not include_dependent_variables:
ind = self.get_independent_values(ind)
if not include_dependent_variables:
ind = self.get_independent_values(ind)

values.append(ind)
values.append(ind)

return np.array(values, ndmin=2)

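A minimal usage sketch of the refactored interface, assuming `optimization_problem` is an already configured OptimizationProblem instance (the variable name is illustrative):

    # The Chebyshev center is now requested explicitly via its own method.
    x0_center = optimization_problem.get_chebyshev_center(
        include_dependent_variables=True
    )

    # create_initial_values always samples random points; the former
    # method='chebyshev' option is gone.
    x0_random = optimization_problem.create_initial_values(
        10, seed=1, include_dependent_variables=True
    )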
4 changes: 2 additions & 2 deletions CADETProcess/optimization/pymooAdapter.py
@@ -95,7 +95,7 @@ def run(self, optimization_problem: OptimizationProblem, x0=None):
pop = x0
else:
pop = optimization_problem.create_initial_values(
pop_size, method='chebyshev', seed=self.seed, include_dependent_variables=True
pop_size, seed=self.seed, include_dependent_variables=True
)

pop = np.array(pop, ndmin=2)
@@ -107,7 +107,7 @@ def run(self, optimization_problem: OptimizationProblem, x0=None):
)
n_remaining = pop_size - len(pop)
remaining = optimization_problem.create_initial_values(
n_remaining, method='chebyshev', seed=self.seed, include_dependent_variables=True
n_remaining, seed=self.seed, include_dependent_variables=True
)
pop = np.vstack((pop, remaining))
elif len(pop) > pop_size:
2 changes: 1 addition & 1 deletion CADETProcess/optimization/scipyAdapter.py
@@ -108,7 +108,7 @@ def callback_function(x, state=None):

if x0 is None:
x0 = optimization_problem.create_initial_values(
1, method='chebyshev', include_dependent_variables=False
1, include_dependent_variables=False
)[0]

x0_transformed = optimization_problem.transform(x0)
64 changes: 30 additions & 34 deletions tests/test_optimization_problem.py
@@ -737,21 +737,19 @@ def test_add_linear_constraints(self):
self.optimization_problem.add_linear_constraint('var_0', [])

def test_initial_values(self):
x0_chebyshev_expected = [[0.2928932, 0.7071068]]
x0_chebyshev = self.optimization_problem.create_initial_values(
1, method='chebyshev'
x0_chebyshev_expected = [0.2928932, 0.7071068]
x0_chebyshev = self.optimization_problem.get_chebyshev_center(
include_dependent_variables=True
)
np.testing.assert_almost_equal(x0_chebyshev, x0_chebyshev_expected)

x0_seed_1_expected = [[0.5666524, 0.8499365]]
x0_seed_1 = self.optimization_problem.create_initial_values(
1, method='random', seed=1
1, seed=1
)
np.testing.assert_almost_equal(x0_seed_1, x0_seed_1_expected)

x0_seed_1_random = self.optimization_problem.create_initial_values(
1, method='random'
)
x0_seed_1_random = self.optimization_problem.create_initial_values(1)

with self.assertRaises(AssertionError):
np.testing.assert_almost_equal(x0_seed_1_random, x0_seed_1_expected)
@@ -772,13 +770,11 @@ def test_initial_values(self):
[0.5365699848069944, 0.6516012021958184]
]
x0_seed_10 = self.optimization_problem.create_initial_values(
10, method='random', seed=1
10, seed=1
)
np.testing.assert_almost_equal(x0_seed_10, x0_seed_10_expected)

x0_seed_10_random = self.optimization_problem.create_initial_values(
10, method='random'
)
x0_seed_10_random = self.optimization_problem.create_initial_values(10)

with self.assertRaises(AssertionError):
np.testing.assert_almost_equal(x0_seed_10_random, x0_seed_10_expected)
@@ -851,9 +847,9 @@ def transform():
self.assertEqual(variables_expected, variables)

def test_initial_values_without_dependencies(self):
x0_chebyshev_expected = [[0.79289322, 0.20710678, 0.5]]
x0_chebyshev = self.optimization_problem.create_initial_values(
1, method='chebyshev', include_dependent_variables=False
x0_chebyshev_expected = [0.79289322, 0.20710678, 0.5]
x0_chebyshev = self.optimization_problem.get_chebyshev_center(
include_dependent_variables=False
)
np.testing.assert_almost_equal(x0_chebyshev, x0_chebyshev_expected)

@@ -864,13 +860,13 @@ def test_initial_values_without_dependencies(self):
0.4999999999999999
]
variables = self.optimization_problem.get_dependent_values(
x0_chebyshev[0, :]
x0_chebyshev
)
np.testing.assert_almost_equal(variables, variables_expected)

self.assertTrue(
self.optimization_problem.check_linear_constraints(
x0_chebyshev[0, :], get_dependent_values=True
x0_chebyshev, get_dependent_values=True
)
)
self.assertTrue(
Expand All @@ -879,12 +875,12 @@ def test_initial_values_without_dependencies(self):

x0_seed_1_expected = [[0.7311044, 0.1727515, 0.1822629]]
x0_seed_1 = self.optimization_problem.create_initial_values(
1, method='random', seed=1, include_dependent_variables=False
1, seed=1, include_dependent_variables=False
)
np.testing.assert_almost_equal(x0_seed_1, x0_seed_1_expected)

x0_seed_1_random = self.optimization_problem.create_initial_values(
1, method='random', include_dependent_variables=False
1, include_dependent_variables=False
)

with self.assertRaises(AssertionError):
Expand All @@ -906,52 +902,52 @@ def test_initial_values_without_dependencies(self):
[0.8359314486227345, 0.39493879515319996, 0.8128182754300088]
]
x0_seed_10 = self.optimization_problem.create_initial_values(
10, method='random', seed=1, include_dependent_variables=False
10, seed=1, include_dependent_variables=False
)
np.testing.assert_almost_equal(x0_seed_10, x0_seed_10_expected)

x0_seed_10_random = self.optimization_problem.create_initial_values(
10, method='random', include_dependent_variables=False
10, include_dependent_variables=False
)

with self.assertRaises(AssertionError):
np.testing.assert_almost_equal(x0_seed_10_random, x0_seed_10_expected)

def test_initial_values(self):
x0_chebyshev_expected = [[0.79289322, 0.20710678, 0.2071068, 0.5]]
x0_chebyshev = self.optimization_problem.create_initial_values(
1, method='chebyshev', include_dependent_variables=True
x0_chebyshev_expected = [0.79289322, 0.20710678, 0.2071068, 0.5]
x0_chebyshev = self.optimization_problem.get_chebyshev_center(
include_dependent_variables=True
)

np.testing.assert_almost_equal(x0_chebyshev, x0_chebyshev_expected)

variables_expected = [
independent_variables_expected = [
0.7928932188134523,
0.2071067811865475,
0.2071067811865475,
0.4999999999999999
]
variables = self.optimization_problem.get_dependent_values(
x0_chebyshev[0, [0, 1, 3]]
independent_variables = self.optimization_problem.get_independent_values(
x0_chebyshev
)
np.testing.assert_almost_equal(variables, variables_expected)
np.testing.assert_almost_equal(independent_variables, independent_variables_expected)

self.assertTrue(
self.optimization_problem.check_linear_constraints(
x0_chebyshev[0, :], get_dependent_values=False
x0_chebyshev, get_dependent_values=False
)
)
self.assertTrue(
self.optimization_problem.check_linear_constraints(variables)
self.optimization_problem.check_linear_constraints(x0_chebyshev)
)

x0_seed_1_expected = [[0.7311044, 0.1727515, 0.1727515, 0.1822629]]
x0_seed_1 = self.optimization_problem.create_initial_values(
1, method='random', seed=1, include_dependent_variables=True
1, seed=1, include_dependent_variables=True
)
np.testing.assert_almost_equal(x0_seed_1, x0_seed_1_expected)

x0_seed_1_random = self.optimization_problem.create_initial_values(
1, method='random', include_dependent_variables=True
1, include_dependent_variables=True
)

with self.assertRaises(AssertionError):
@@ -973,12 +969,12 @@ def test_initial_values(self):
[0.8359314486227345, 0.39493879515319996, 0.39493879515319996, 0.8128182754300088]
]
x0_seed_10 = self.optimization_problem.create_initial_values(
10, method='random', seed=1, include_dependent_variables=True
10, seed=1, include_dependent_variables=True
)
np.testing.assert_almost_equal(x0_seed_10, x0_seed_10_expected)

x0_seed_10_random = self.optimization_problem.create_initial_values(
10, method='random', include_dependent_variables=True
10, include_dependent_variables=True
)

with self.assertRaises(AssertionError):
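To exercise the updated tests locally, one possible invocation (assuming pytest is used as the runner, which this commit does not specify):

    python -m pytest tests/test_optimization_problem.py -k initial_values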
