Fixup for optimizer docs (will be squashed).
TallJimbo committed May 12, 2016
1 parent 54c2674 commit 0daccd0
Showing 2 changed files with 7 additions and 17 deletions.
include/lsst/meas/modelfit/optimizer.h (11 changes: 7 additions & 4 deletions)
@@ -187,7 +187,7 @@ class OptimizerObjective {
      * compute analytic derivatives are not supported.
      *
      * @param[in] parameters An array of parameters with shape (parameterSize).
-     * @param[out] deerivatives Output array that will contain d(model - data)/d(parameters) on
+     * @param[out] derivatives Output array that will contain d(model - data)/d(parameters) on
      *                          return. Must be allocated to shape (dataSize, parameterSize),
      *                          but need not be initialized.
      */
@@ -212,12 +212,12 @@
      * The default implementation simply returns 1.0 (appropriate for an unnormalized constant prior,
      * which is mathematically equivalent to no prior).
      */
-    virtual Scalar computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const;
+    virtual Scalar computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const { return 1.0; }
 
     /**
      * Compute the first and second derivatives of the Bayesian prior with respect to the parameters.
      *
-     * The default implementation simply returns 0.0 (appropriate for an constant prior,
+     * The default implementation simply sets the output arrays to 0.0 (appropriate for a constant prior,
      * which is mathematically equivalent to no prior).
      *
      * @param[in] parameters An array of parameters with shape (parameterSize).
@@ -232,7 +232,10 @@
         ndarray::Array<Scalar const,1,1> const & parameters,
         ndarray::Array<Scalar,1,1> const & gradient,
         ndarray::Array<Scalar,2,1> const & hessian
-    ) const;
+    ) const {
+        gradient.deep() = 0.0;
+        hessian.deep() = 0.0;
+    }
 
     virtual ~OptimizerObjective() {}
 };
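
For readers new to this interface: the two defaults that this commit moves inline are also the hooks a subclass overrides to supply a nontrivial prior. The sketch below is not part of the commit; it shows roughly how a separable Gaussian prior could be plugged into the computePrior/differentiatePrior hooks. The members _mean and _sigma are hypothetical, the constructor and the remaining pure-virtual methods of OptimizerObjective are omitted, and it assumes the interface expects derivatives of the prior value itself (consistent with the zero defaults for a constant prior).

// Sketch (not from this commit): a subclass that supplies a separable Gaussian
// prior, p(theta) = exp(-0.5 * sum_i ((theta_i - mean_i) / sigma_i)^2), through
// the computePrior/differentiatePrior hooks shown above.  The members _mean and
// _sigma are hypothetical; the constructor and the other pure-virtual methods of
// OptimizerObjective still have to be provided for this to compile.
#include <cmath>
#include <vector>
#include "lsst/meas/modelfit/optimizer.h"

namespace lsst { namespace meas { namespace modelfit {

class GaussianPriorObjective : public OptimizerObjective {
public:

    // Unnormalized Gaussian prior value, analogous to the constant 1.0 default.
    virtual Scalar computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const {
        Scalar chi2 = 0.0;
        int const n = parameters.getSize<0>();
        for (int i = 0; i < n; ++i) {
            Scalar t = (parameters[i] - _mean[i]) / _sigma[i];
            chi2 += t*t;
        }
        return std::exp(-0.5*chi2);
    }

    // First and second derivatives of the prior value itself (an assumption that
    // matches the zero defaults for a constant prior; adapt if the optimizer
    // actually wants derivatives of -ln(prior)).
    virtual void differentiatePrior(
        ndarray::Array<Scalar const,1,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,1> const & hessian
    ) const {
        Scalar const p = computePrior(parameters);
        int const n = parameters.getSize<0>();
        for (int i = 0; i < n; ++i) {
            Scalar gi = -(parameters[i] - _mean[i]) / (_sigma[i]*_sigma[i]);
            gradient[i] = gi*p;  // d(p)/d(theta_i)
            for (int j = 0; j < n; ++j) {
                Scalar gj = -(parameters[j] - _mean[j]) / (_sigma[j]*_sigma[j]);
                // d2(p)/d(theta_i)d(theta_j) = (gi*gj - delta_ij/sigma_i^2) * p
                hessian[i][j] = (gi*gj - (i == j ? 1.0/(_sigma[i]*_sigma[i]) : 0.0))*p;
            }
        }
    }

private:
    std::vector<Scalar> _mean;   // hypothetical per-parameter prior means
    std::vector<Scalar> _sigma;  // hypothetical per-parameter prior widths
};

}}} // namespace lsst::meas::modelfit
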
src/optimizer.cc (13 changes: 0 additions & 13 deletions)
@@ -271,19 +271,6 @@ PTR(OptimizerObjective) OptimizerObjective::makeFromLikelihood(
     return boost::make_shared<LikelihoodOptimizerObjective>(likelihood, prior);
 }
 
-Scalar OptimizerObjective::computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const {
-    return 1.0;
-}
-
-void OptimizerObjective::differentiatePrior(
-    ndarray::Array<Scalar const,1,1> const & parameters,
-    ndarray::Array<Scalar,1,1> const & gradient,
-    ndarray::Array<Scalar,2,1> const & hessian
-) const {
-    gradient.deep() = 0.0;
-    hessian.deep() = 0.0;
-}
-
 // ----------------- OptimizerIterationData -----------------------------------------------------------------
 
 OptimizerIterationData::OptimizerIterationData(int dataSize, int parameterSize) :
