Return the new cost and slack variables from AddMaximizeLogDeterminantSymmetricMatrixCost #16296

Merged 1 commit on Jan 5, 2022
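In short, AddMaximizeLogDeterminantSymmetricMatrixCost now returns the linear cost binding together with the slack variables t and the lower triangular matrix Z that it creates, instead of returning void. A minimal pydrake sketch of the new usage (illustrative only; the module path and the keyword argument follow the updated Python test, everything else is an assumed example):

from pydrake.solvers import mathematicalprogram as mp

prog = mp.MathematicalProgram()
# X must be symmetric; the cost implicitly requires it to be psd.
X = prog.NewSymmetricContinuousVariables(2, "X")
prog.AddPositiveSemidefiniteConstraint(X)
# Keep X bounded so that log(det(X)) has a finite maximum.
prog.AddBoundingBoxConstraint(-1, 1, X)

# Previously this call returned nothing; it now returns the cost binding and
# the newly created slack variables t and Z.
cost, t, Z = prog.AddMaximizeLogDeterminantSymmetricMatrixCost(X=X)

# Solving needs a solver with exponential cone support, e.g. SCS.
result = mp.Solve(prog)
if result.is_success():
    print("max log(det(X)) =", -result.get_optimal_cost())
    print("t =", result.GetSolution(t))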
4 changes: 3 additions & 1 deletion bindings/pydrake/solvers/mathematicalprogram_py.cc
@@ -875,7 +875,9 @@ void BindMathematicalProgram(py::module m) {
           py::arg("b"), py::arg("vars"),
           doc.MathematicalProgram.AddL2NormCostUsingConicConstraint.doc)
       .def("AddMaximizeLogDeterminantSymmetricMatrixCost",
-          static_cast<void (MathematicalProgram::*)(
+          static_cast<std::tuple<Binding<LinearCost>,
+              VectorX<symbolic::Variable>, MatrixX<symbolic::Expression>> (
+              MathematicalProgram::*)(
               const Eigen::Ref<const MatrixX<symbolic::Expression>>& X)>(
               &MathematicalProgram::
                   AddMaximizeLogDeterminantSymmetricMatrixCost),
5 changes: 4 additions & 1 deletion bindings/pydrake/solvers/test/mathematicalprogram_test.py
@@ -686,7 +686,10 @@ def test_log_determinant(self):
         for i in range(3):
             pt = pts[i, :]
             prog.AddLinearConstraint(pt.dot(X.dot(pt)) <= 1)
-        prog.AddMaximizeLogDeterminantSymmetricMatrixCost(X)
+        linear_cost, log_det_t, log_det_Z = \
+            prog.AddMaximizeLogDeterminantSymmetricMatrixCost(X=X)
+        self.assertEqual(log_det_t.shape, (2,))
+        self.assertEqual(log_det_Z.shape, (2, 2))
         result = mp.Solve(prog)
         self.assertTrue(result.is_success())

7 changes: 5 additions & 2 deletions solvers/mathematical_program.cc
@@ -522,7 +522,9 @@ Binding<Cost> MathematicalProgram::AddCost(const Expression& e) {
   return AddCost(internal::ParseCost(e));
 }
 
-void MathematicalProgram::AddMaximizeLogDeterminantSymmetricMatrixCost(
+std::tuple<Binding<LinearCost>, VectorX<symbolic::Variable>,
+    MatrixX<symbolic::Expression>>
+MathematicalProgram::AddMaximizeLogDeterminantSymmetricMatrixCost(
     const Eigen::Ref<const MatrixX<symbolic::Expression>>& X) {
   DRAKE_DEMAND(X.rows() == X.cols());
   const int X_rows = X.rows();
@@ -554,7 +556,8 @@ void MathematicalProgram::AddMaximizeLogDeterminantSymmetricMatrixCost(
         Vector3<symbolic::Expression>(Z(i, i), 1, t(i)));
   }
 
-  AddLinearCost(-t.cast<symbolic::Expression>().sum());
+  const auto cost = AddLinearCost(-Eigen::VectorXd::Ones(t.rows()), t);
+  return std::make_tuple(cost, std::move(t), std::move(Z));
 }
 
 void MathematicalProgram::AddMaximizeGeometricMeanCost(
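The replaced line built the cost by parsing the symbolic expression -∑ᵢt(i); the new line passes the coefficient vector directly, which is equivalent but yields the Binding<LinearCost> that the function can now return. A standalone pydrake sketch of that equivalence (illustrative, not part of this PR):

import numpy as np
from pydrake.solvers import mathematicalprogram as mp

prog = mp.MathematicalProgram()
t = prog.NewContinuousVariables(2, "t")
# Parsed from a symbolic Expression (the old code path).
cost_from_expr = prog.AddLinearCost(-t[0] - t[1])
# Coefficient vector and variables supplied directly (the new code path).
cost_from_vec = prog.AddLinearCost(-np.ones(2), 0.0, t)
print(cost_from_expr.evaluator().a())  # [-1. -1.]
print(cost_from_vec.evaluator().a())   # [-1. -1.]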
11 changes: 10 additions & 1 deletion solvers/mathematical_program.h
@@ -1205,12 +1205,21 @@ class MathematicalProgram {
    * and we will minimize -∑ᵢt(i).
    * @param X A symmetric positive semidefinite matrix X, whose log(det(X)) will
    * be maximized.
+   * @return (cost, t, Z) cost is -∑ᵢt(i); we also return the newly created
+   * slack variables t and the lower triangular matrix Z. Note that Z is not a
+   * matrix of symbolic::Variable but of symbolic::Expression, since the entries
+   * of Z above the diagonal are not variables but the constant expression 0.
    * @pre X is a symmetric matrix.
    * @note We implicitly require that `X` is positive semidefinite (psd) (as
    * X is the diagonal entry of the big psd matrix above). If your `X` is not
    * necessarily psd, then don't call this function.
+   * @note The constraint log(Z(i, i)) >= t(i) is imposed as an exponential cone
+   * constraint. Please make sure you have a solver that supports exponential
+   * cone constraints (currently SCS does).
    */
-  void AddMaximizeLogDeterminantSymmetricMatrixCost(
+  std::tuple<Binding<LinearCost>, VectorX<symbolic::Variable>,
+      MatrixX<symbolic::Expression>>
+  AddMaximizeLogDeterminantSymmetricMatrixCost(
       const Eigen::Ref<const MatrixX<symbolic::Expression>>& X);
 
   /**
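Continuing the pydrake sketch near the top of the page, the relations described in the new documentation can be checked on the returned objects after solving (names and tolerances are illustrative):

import numpy as np

# `result`, `cost`, `t`, and `X` come from the earlier sketch.
t_sol = result.GetSolution(t)
X_sol = result.GetSolution(X)
# The returned cost evaluates to -sum_i t(i) ...
assert abs(result.EvalBinding(cost)[0] + t_sol.sum()) < 1e-6
# ... and at the optimum sum_i t(i) equals log(det(X)).
assert abs(t_sol.sum() - np.log(np.linalg.det(X_sol))) < 1e-4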
18 changes: 16 additions & 2 deletions solvers/test/exponential_cone_program_examples.cc
@@ -95,8 +95,9 @@ void MinimalEllipsoidCoveringPoints(const SolverInterface& solver, double tol) {
   ellipsoid_psd << S, b.cast<symbolic::Expression>() / 2,
       b.cast<symbolic::Expression>().transpose() / 2, c;
   prog.AddPositiveSemidefiniteConstraint(ellipsoid_psd);
-  prog.AddMaximizeLogDeterminantSymmetricMatrixCost(
-      S.cast<symbolic::Expression>());
+  const auto [linear_cost, log_det_t, log_det_Z] =
+      prog.AddMaximizeLogDeterminantSymmetricMatrixCost(
+          S.cast<symbolic::Expression>());
   for (int i = 0; i < 4; ++i) {
     prog.AddLinearConstraint(
         pts.col(i).dot(S.cast<symbolic::Expression>() * pts.col(i)) +
@@ -122,6 +123,19 @@ void MinimalEllipsoidCoveringPoints(const SolverInterface& solver, double tol) {
   const double expected_cost =
       -std::log(0.25 / std::pow(scaling_factor(0) * scaling_factor(1), 2));
   EXPECT_NEAR(result.get_optimal_cost(), expected_cost, tol);
+  EXPECT_NEAR(result.EvalBinding(linear_cost)(0), expected_cost, tol);
+  const Eigen::VectorXd log_det_t_sol = result.GetSolution(log_det_t);
+  EXPECT_NEAR(log_det_t_sol.sum(), -expected_cost, tol);
+  Eigen::MatrixXd log_det_Z_sol =
+      ExtractDoubleOrThrow(result.GetSolution(log_det_Z));
+  EXPECT_TRUE(
+      (log_det_Z_sol.diagonal().array().log() >= log_det_t_sol.array() - tol)
+          .all());
+  EXPECT_TRUE(CompareMatrices(
+      Eigen::TriangularView<Eigen::MatrixXd, Eigen::StrictlyUpper>(
+          log_det_Z_sol)
+          .toDenseMatrix(),
+      Eigen::MatrixXd::Zero(log_det_Z_sol.rows(), log_det_Z_sol.cols())));
   EXPECT_NEAR(-std::log(S_sol.determinant()), expected_cost, tol);
 }
 } // namespace test
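A Python analogue of the new TriangularView check, again continuing the earlier sketch: the entries of the returned Z above the diagonal are the constant Expression 0, so they can be evaluated without a solver result (illustrative):

n = Z.shape[0]
for i in range(n):
    for j in range(i + 1, n):
        # Constant expressions evaluate without an environment.
        assert Z[i, j].Evaluate() == 0.0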