Skip to content

Commit

Permalink
Merge pull request #78 from jfowkes/73-add-missing-sparse-routines-so…
Browse files Browse the repository at this point in the history
…bj-sgrad

Add sparse routines sobj and sgrad
  • Loading branch information
lindonroberts committed May 3, 2024
2 parents 3d3bdae + c5d5ba3 commit a95e94b
Show file tree
Hide file tree
Showing 4 changed files with 340 additions and 11 deletions.
8 changes: 6 additions & 2 deletions docs/interface.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ The methods available for each :code:`CUTEstProblem` instance are:

For large-scale problems, you may want to get vectors/matrices as sparse matrices. We have the following methods which return sparse matrices:

* `sobj(x[, gradient]) <methods/pycutest.CUTEstProblem.sobj.html>`_: (sparse) evaluate objective (and optionally its gradient)
* `sgrad(x[, index]) <methods/pycutest.CUTEstProblem.sgrad.html>`_: (sparse) evaluate objective gradient or specific constraint gradient
* `scons(x[, index, gradient]) <methods/pycutest.CUTEstProblem.scons.html>`_: (sparse) evaluate constraint(s), and optionally the Jacobian of all constraints or the gradient of the index-th constraint
* `slagjac(x[, v]) <methods/pycutest.CUTEstProblem.slagjac.html>`_: (sparse) evaluate gradient of objective/Lagrangian and Jacobian of constraints
* `sphess(x[, v]) <methods/pycutest.CUTEstProblem.sphess.html>`_: (sparse) evaluate Hessian of objective or Lagrangian
Expand Down Expand Up @@ -84,10 +86,12 @@ Please click on a :code:`CUTEstProblem` method below for full documentation:
hess
ihess
hprod
gradhess
gradhess
report
sobj
sgrad
scons
slagjac
sphess
isphess
gradsphess
gradsphess
159 changes: 158 additions & 1 deletion pycutest/c_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@
static PyObject *cutest_ihess(PyObject *self, PyObject *args);
static PyObject *cutest_hprod(PyObject *self, PyObject *args);
static PyObject *cutest_gradhess(PyObject *self, PyObject *args);
static PyObject *cutest_sobj(PyObject *self, PyObject *args);
static PyObject *cutest_sgrad(PyObject *self, PyObject *args);
static PyObject *cutest_scons(PyObject *self, PyObject *args);
static PyObject *cutest_slagjac(PyObject *self, PyObject *args);
static PyObject *cutest_sphess(PyObject *self, PyObject *args);
Expand Down Expand Up @@ -141,6 +143,25 @@
return dict;
}
/* Extract a sparse gradient into a pair of newly allocated NumPy arrays.
 *
 * Inputs:
 *   nnzg -- number of nonzero entries in the sparse gradient
 *   si   -- 1-based (Fortran) indices of the nonzeros, length >= nnzg
 *   sv   -- values of the nonzeros, length >= nnzg
 * Outputs (new references; caller takes ownership):
 *   Mgi  -- 1D NPY_INT array holding the 0-based (C) indices
 *   Mgv  -- 1D NPY_DOUBLE array holding the corresponding values
 *
 * NOTE(review): the results of PyArray_SimpleNew are not checked for NULL,
 * so an allocation failure would crash in the fill loop -- confirm callers
 * can tolerate this or add checks (would require a non-void return).
 */
void extract_sparse_gradient(npy_int nnzg, npy_int *si, npy_double *sv, PyArrayObject **Mgi, PyArrayObject **Mgv) {
    npy_double *gv;
    npy_int *gi, i;
    npy_intp dims[1];
    /* Allocate and fill objective gradient data,
       converting indices from Fortran (1-based) to C (0-based). */
    dims[0]=nnzg;
    *Mgi=(PyArrayObject *)PyArray_SimpleNew(1, dims, NPY_INT);
    *Mgv=(PyArrayObject *)PyArray_SimpleNew(1, dims, NPY_DOUBLE);
    gi=(npy_int *)PyArray_DATA(*Mgi);
    gv=(npy_double *)PyArray_DATA(*Mgv);
    for(i=0;i<nnzg;i++) {
        gi[i]=si[i]-1;
        gv[i]=sv[i];
    }
}
/* Extract sparse gradient and Jacobian in form of NumPy arrays */
void extract_sparse_gradient_jacobian(npy_int nnzjplusno, npy_int *sji, npy_int *sjfi, npy_double *sjv,
PyArrayObject **Mgi, PyArrayObject **Mgv, PyArrayObject **MJi, PyArrayObject **MJfi, PyArrayObject **MJv) {
Expand Down Expand Up @@ -661,7 +682,7 @@
PyDoc_STRVAR(cutest_grad_doc,
"Returns the gradient of the objective or gradient of the i-th constraint at x.\n"
"\n"
"g=grad(x) -- objective gradient\n"
"g=grad(x) -- objective gradient\n"
"g=grad(x,i) -- i-th constraint gradient\n"
"\n"
"Input\n"
Expand Down Expand Up @@ -1399,6 +1420,140 @@
}
PyDoc_STRVAR(cutest_sobj_doc,
"Returns the value of objective and its sparse gradient at x (constrained problems only).\n"
"\n"
"f=sobj(x)\n"
"(f, gi, gv)=sobj(x, gradFlag)\n"
"\n"
"Input\n"
"x -- 1D array of length n with the values of variables\n"
"gradFlag -- if given the function returns f and gi,gv; can be anything\n"
"\n"
"Output\n"
"f -- float holding the value of the function at x\n"
"gi -- 1D array of integers holding the indices (0 .. n-1) of nonzero\n"
" elements in the sparse gradient vector\n"
"gv -- 1D array holding the values of nonzero elements in the sparse gradient\n"
" vector. Has the same length as gi.\n"
"\n"
"CUTEst tools used: CUTEST_cofsg\n"
);
/* sobj(x[, gradFlag]) -- objective value, and optionally its sparse gradient.
 *
 * Constrained problems only (wraps CUTEST_cofsg); unconstrained problems
 * must use obj() instead.  Returns f (float), or the tuple (f, gi, gv)
 * where gi holds 0-based indices of nonzero gradient entries and gv the
 * corresponding values.
 */
static PyObject *cutest_sobj(PyObject *self, PyObject *args) {
    PyArrayObject *arg1, *Mgi, *Mgv;
    PyObject *arg2;
    doublereal *x, *sv;
    doublereal f;
    npy_int *si;
    npy_int nnzg, nzero=0;

    if (!check_setup())
        return NULL;

    if (CUTEst_ncon == 0) {
        PyErr_SetString(PyExc_Exception, "For unconstrained problems please use obj()");
        return NULL;
    }

    if (!PyArg_ParseTuple(args, "O|O", &arg1, &arg2))
        return NULL;

    /* Check if x is double and of correct length and shape */
    if (!(PyArray_Check(arg1) && PyArray_ISFLOAT(arg1) && PyArray_TYPE(arg1)==NPY_DOUBLE && PyArray_NDIM(arg1)==1 && PyArray_DIM(arg1, 0)==CUTEst_nvar)) {
        PyErr_SetString(PyExc_Exception, "Argument 1 must be a 1D double array of length nvar");
        return NULL;
    }

    x=(npy_double *)PyArray_DATA(arg1);
    if (PyObject_Length(args)==1) {
        /* Objective value only: pass a zero-length gradient buffer. */
        CUTEST_cofsg((integer *)&status, (integer *)&CUTEst_nvar, x, &f, (integer *)&nnzg, (integer *)&nzero, NULL, NULL, &somethingFalse);
        return Py_BuildValue("d", f);
    } else {
        si=(npy_int *)malloc(CUTEst_nvar*sizeof(npy_int));
        sv=(npy_double *)malloc(CUTEst_nvar*sizeof(npy_double));
        /* Fixed: the original did not check the malloc results. */
        if (si == NULL || sv == NULL) {
            free(si);
            free(sv);
            return PyErr_NoMemory();
        }
        CUTEST_cofsg((integer *)&status, (integer *)&CUTEst_nvar, x, &f, (integer *)&nnzg, (integer *)&CUTEst_nvar, sv, (integer *)si, &somethingTrue);
        extract_sparse_gradient(nnzg, si, sv, &Mgi, &Mgv);
        free(si);
        free(sv);
        /* Fixed: use "N" to steal the new references returned by
           extract_sparse_gradient; the original "dOO" incremented the
           refcounts again and leaked one reference per array. */
        return Py_BuildValue("dNN", f, Mgi, Mgv);
    }
}
PyDoc_STRVAR(cutest_sgrad_doc,
"Returns the sparse gradient of the objective or gradient of the i-th constraint at x (constrained problems only).\n"
"\n"
"(gi, gv)=sgrad(x) -- objective gradient\n"
"(gi, gv)=sgrad(x,i) -- i-th constraint gradient\n"
"\n"
"Input\n"
"x -- 1D array of length n with the values of variables\n"
"i -- integer index of constraint (between 0 and m-1)\n"
"\n"
"Output\n"
"gi -- 1D array of integers holding the indices (0 .. n-1) of nonzero\n"
" elements in the sparse gradient vector\n"
"gv -- 1D array holding the values of nonzero elements in the sparse gradient\n"
" vector. Has the same length as gi.\n"
"\n"
"CUTEst tools used: CUTEST_cisgr\n"
);
/* sgrad(x[, i]) -- sparse gradient of the objective, or of constraint i.
 *
 * Constrained problems only (wraps CUTEST_cisgr); unconstrained problems
 * must use grad() instead.  Returns the tuple (gi, gv) where gi holds
 * 0-based indices of nonzero gradient entries and gv the corresponding
 * values.  icon=0 selects the objective; icon=i+1 selects constraint i
 * (Fortran 1-based convention).
 */
static PyObject *cutest_sgrad(PyObject *self, PyObject *args) {
    PyArrayObject *arg1, *Mgi, *Mgv;
    doublereal *x, *sv;
    int index;
    npy_int *si;
    npy_int icon, nnzg;

    if (!check_setup())
        return NULL;

    if (CUTEst_ncon == 0) {
        PyErr_SetString(PyExc_Exception, "For unconstrained problems please use grad()");
        return NULL;
    }

    if (!PyArg_ParseTuple(args, "O|i", &arg1, &index))
        return NULL;

    /* Check if x is double and of correct length and shape */
    if (!(PyArray_Check(arg1) && PyArray_ISFLOAT(arg1) && PyArray_TYPE(arg1)==NPY_DOUBLE && PyArray_NDIM(arg1)==1 && PyArray_DIM(arg1, 0)==CUTEst_nvar)) {
        PyErr_SetString(PyExc_Exception, "Argument 1 must be a 1D double array of length nvar");
        return NULL;
    }

    /* Check the optional constraint index and map it to Fortran numbering. */
    if (PyObject_Length(args)>1) {
        if (index<0 || index>=CUTEst_ncon) {
            PyErr_SetString(PyExc_Exception, "Argument 2 must be between 0 and ncon-1");
            return NULL;
        }
        icon=index+1;
    } else {
        icon=0;
    }

    x=(npy_double *)PyArray_DATA(arg1);
    si=(npy_int *)malloc(CUTEst_nvar*sizeof(npy_int));
    sv=(npy_double *)malloc(CUTEst_nvar*sizeof(npy_double));
    /* Fixed: the original did not check the malloc results. */
    if (si == NULL || sv == NULL) {
        free(si);
        free(sv);
        return PyErr_NoMemory();
    }
    CUTEST_cisgr((integer *)&status, (integer *)&CUTEst_nvar, (integer *)&icon, x, (integer *)&nnzg, (integer *)&CUTEst_nvar, sv, (integer *)si);
    extract_sparse_gradient(nnzg, si, sv, &Mgi, &Mgv);
    free(si);
    free(sv);
    /* Fixed: use "N" to steal the new references returned by
       extract_sparse_gradient; the original "OO" incremented the
       refcounts again and leaked one reference per array. */
    return Py_BuildValue("NN", Mgi, Mgv);
}
PyDoc_STRVAR(cutest_scons_doc,
"Returns the value of constraints and the sparse Jacobian of constraints at x.\n"
"\n"
Expand Down Expand Up @@ -2009,6 +2164,8 @@
{"ihess", cutest_ihess, METH_VARARGS, cutest_ihess_doc},
{"hprod", cutest_hprod, METH_VARARGS, cutest_hprod_doc},
{"gradhess", cutest_gradhess, METH_VARARGS, cutest_gradhess_doc},
{"sobj", cutest_sobj, METH_VARARGS, cutest_sobj_doc},
{"sgrad", cutest_sgrad, METH_VARARGS, cutest_sgrad_doc},
{"scons", cutest_scons, METH_VARARGS, cutest_scons_doc},
{"slagjac", cutest_slagjac, METH_VARARGS, cutest_slagjac_doc},
{"sphess", cutest_sphess, METH_VARARGS, cutest_sphess_doc},
Expand Down
140 changes: 132 additions & 8 deletions pycutest/problem_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -662,7 +662,131 @@ def gradhess(self, x, v=None, gradient_of_lagrangian=True):
g, H = self._module.gradhess(self.free_to_all(x))
return self.all_to_free(g), H[self.idx_free][:, self.idx_free]

# scons() wrapper (private)
# sobj() wrapper (private)
def __sobj(self, x, gradFlag=False):
    """Evaluate the objective and optionally its sparse gradient at x.
    Only works on constrained problems due to the inherent design of CUTEst.

    f=__sobj(x)         -- objective value
    (f, g)=__sobj(x, True) -- objective value and sparse objective gradient

    Input
    x        -- 1D array of length n with the values of variables
    gradFlag -- boolean flag; if True the objective gradient is also
                returned (default False)

    Output
    f -- float holding the value of the objective function at x
    g -- a scipy.sparse.coo_matrix of size 1-by-n_full holding the gradient
         of the objective at x

    This function is a wrapper for sobj().
    """

    if not gradFlag:
        return self._module.sobj(x)
    f, idx, vals = self._module.sobj(x, gradFlag)
    row = np.zeros(len(vals))
    g = coo_matrix((vals, (row, idx)), shape=(1, self.n_full))
    return (f, g)

def sobj(self, x, gradient=False):
    """
    Evaluate the objective (and optionally its sparse gradient).

    .. code-block:: python

        # objective
        f = problem.sobj(x)
        # objective and gradient
        f, g = problem.sobj(x, gradient=True)

    The vector g is of type scipy.sparse.coo_matrix.
    For unconstrained problems, g is formed from a dense matrix due to CUTEst limitations.
    For small problems, problem.obj (which returns dense matrices) may be preferable.

    This calls CUTEst routine CUTEST_uofg or CUTEST_cofsg.

    :param x: input vector
    :type x: numpy.ndarray with shape (n,)
    :param gradient: whether to return objective and gradient, or just objective (default=False; i.e. objective only)
    :type gradient: bool, optional
    :return: objective value f, or tuple (f,g) of objective and sparse gradient at x
    :rtype: float or (float, scipy.sparse.coo_matrix(n,))
    """
    self.check_input_x(x)
    if self.m <= 0:  # unconstrained problem (convert dense obj)
        if gradient:
            f, g = self.obj(x, True)  # fixed/free variables already handled
            return f, coo_matrix(g)  # inefficient but CUTEst gives us no choice
        else:
            return self.obj(x)
    else:  # constrained problem (use sobj wrapper)
        if gradient:
            f, g = self.__sobj(self.free_to_all(x), True)
            return f, sparse_vec_extract_indices(g, self.idx_free)
        else:
            f = self.__sobj(self.free_to_all(x))
            return f

# sgrad() wrapper
def __sgrad(self, x, i=None):
    """Evaluate the sparse gradient of the objective or of the i-th constraint at x.
    Only works on constrained problems due to the inherent design of CUTEst.

    g=__sgrad(x)    -- objective gradient
    g=__sgrad(x, i) -- i-th constraint gradient

    Input
    x -- 1D array of length n with the values of variables
    i -- integer index of constraint (between 0 and m-1)

    Output
    g -- a scipy.sparse.coo_matrix of size 1-by-n_full holding the gradient
         of the objective at x or the gradient of the i-th constraint at x

    This function is a wrapper for sgrad().
    """

    args = (x,) if i is None else (x, i)
    idx, vals = self._module.sgrad(*args)
    row = np.zeros(len(vals))
    return coo_matrix((vals, (row, idx)), shape=(1, self.n_full))

def sgrad(self, x, index=None):
    """
    Evaluate the sparse gradient of the objective function or sparse gradient of the i-th constraint.

    .. code-block:: python

        # gradient of objective
        g = problem.sgrad(x)
        # gradient of i-th constraint
        g = problem.sgrad(x, index=i)

    The vector g is of type scipy.sparse.coo_matrix.
    For unconstrained problems, g is formed from a dense matrix due to CUTEst limitations.
    For small problems, problem.grad (which returns dense matrices) may be preferable.

    This calls CUTEst routine CUTEST_ugr or CUTEST_cisgr.

    :param x: input vector
    :type x: numpy.ndarray with shape (n,)
    :param index: which constraint to evaluate. Must be in 0..self.m-1.
    :type index: int, optional
    :return: sparse gradient of objective or sparse gradient of i-th constraint at x
    :rtype: scipy.sparse.coo_matrix(n,)
    """
    self.check_input_x(x)
    if self.m <= 0:  # unconstrained problem (convert dense grad)
        # NOTE(review): index is forwarded positionally to grad(); assumes
        # grad's second positional parameter is the constraint index --
        # confirm against grad's signature.
        g = self.grad(x, index)  # fixed/free variables already handled
        return coo_matrix(g)  # inefficient but CUTEst gives us no choice
    else:  # constrained problem (use sgrad wrapper)
        g = self.__sgrad(self.free_to_all(x), index)
        return sparse_vec_extract_indices(g, self.idx_free)

# scons() wrapper
def __scons(self, x, i=None):
"""Returns the value of constraints and
the sparse Jacobian of constraints at x.
Expand Down Expand Up @@ -938,15 +1062,15 @@ def __gradsphess(self, x, v=None, lagrFlag=False):
"""Returns the sparse Hessian of the Lagrangian, the sparse Jacobian of
constraints, and the gradient of the objective or Lagrangian.
(g, H)=__gradsphess(x) -- unconstrained problems
(g, J, H)=__gradsphess(x, v, gradl) -- constrained problems
(g, H)=__gradsphess(x) -- unconstrained problems
(g, J, H)=__gradsphess(x, v, lagrFlag) -- constrained problems
Input
x -- 1D array of length n with the values of variables
v -- 1D array of length m with the values of Lagrange multipliers
gradl -- boolean flag. If False the gradient of the objective is returned,
if True the gradient of the Lagrangian is returned.
Default is False
x -- 1D array of length n with the values of variables
v -- 1D array of length m with the values of Lagrange multipliers
lagrFlag -- boolean flag. If False the gradient of the objective is returned,
if True the gradient of the Lagrangian is returned.
Default is False
Output
g -- a scipy.sparse.coo_matrix of size 1-by-n_full holding the gradient of objective at x or
Expand Down

0 comments on commit a95e94b

Please sign in to comment.