
Commit 26a04a9

Merge pull request #45 from changlan/nelder-mead
Add the Nelder-Mead algorithm
shansfolder committed May 22, 2017
2 parents fb4dc14 + ac4ef72 commit 26a04a9
Showing 3 changed files with 94 additions and 52 deletions.
36 changes: 14 additions & 22 deletions pyGPs/Core/gp.py
@@ -137,7 +137,7 @@ def setData(self, x, y):
:param y: training labels in shape (n,1)
Note this method will transform x, y to correct shape
        if x, y is given in 1d array.
'''
        # check whether the number of inputs and labels match
assert x.shape[0] == y.shape[0], "number of inputs and labels does not match"
@@ -177,7 +177,7 @@ def plotData_1d(self, axisvals=None):
def plotData_2d(self,x1,x2,t1,t2,p1,p2,axisvals=None):
'''
        Toy method for plotting 2d data of the model. \n
        For plotting, we superimpose the data points with the posterior equi-probability contour
lines for the probability of class two given complete information about the generating mechanism.
:param x1: inputs for class +1
@@ -257,7 +257,7 @@ def optimize(self, x=None, y=None, numIterations=40):
:param y: training labels in shape (n,1)
'''
        # check whether the number of inputs and labels match
        if x is not None and y is not None:
assert x.shape[0] == y.shape[0], "number of inputs and labels does not match"

# check the shape of inputs
@@ -307,7 +307,7 @@ def getPosterior(self, x=None, y=None, der=True):
'''

        # check whether the number of inputs and labels match
        if x is not None and y is not None:
assert x.shape[0] == y.shape[0], "number of inputs and labels does not match"

# check the shape of inputs
@@ -325,7 +325,7 @@ def getPosterior(self, x=None, y=None, der=True):
if self.usingDefaultMean and self.meanfunc is None:
c = np.mean(y)
self.meanfunc = mean.Const(c) # adapt default prior mean wrt. training labels

# call inference method
        if isinstance(self.likfunc, lik.Erf):  # or isinstance(self.likfunc, lik.Logistic):
uy = unique(self.y)
@@ -402,7 +402,7 @@ def predict(self, xs, ys=None):
while nact<=ns-1: # process minibatches of test cases to save memory
id = list(range(nact,min(nact+nperbatch,ns))) # data points to process
kss = covfunc.getCovMatrix(z=xs[id,:], mode='self_test') # self-variances
            if isinstance(covfunc, FITCOfKernel):
Ks = covfunc.getCovMatrix(x=x, z=xs[id,:], mode='cross') # cross-covariances
Ks = Ks[nz,:]
else:
@@ -440,7 +440,7 @@ def predict(self, xs, ys=None):

def predict_with_posterior(self, post, xs, ys=None):
'''
        Prediction of test points (given by xs) based on training data
of the current model with posterior already provided.
(i.e. you already have the posterior and thus don't need the fitting phase.)
This method will output the following value:\n
@@ -576,12 +576,8 @@ def setOptimizer(self, method, num_restarts=None, min_threshold=None, meanRange=
self.optimizer = opt.CG(self,conf)
elif method == "BFGS":
self.optimizer = opt.BFGS(self,conf)
elif method == "LBFGSB":
self.optimizer = opt.LBFGSB(self, conf)
elif method == "COBYLA":
self.optimizer = opt.COBYLA(self, conf)
elif method == "RTMinimize":
self.optimizer = opt.RTMinimize(self, conf)
elif method == "Nelder-Mead":
self.optimizer = opt.Simplex(self, conf)
else:
raise Exception('Optimization method is not set correctly in setOptimizer')
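
A minimal sketch of the user-facing call after this change, assuming the GPR interface shown elsewhere in this diff (setData, setOptimizer, optimize) and made-up toy data:

    import numpy as np
    import pyGPs

    x = np.random.normal(size=(20, 1))                  # toy inputs
    y = np.sin(x).ravel() + 0.1 * np.random.randn(20)   # toy targets

    model = pyGPs.GPR()
    model.setData(x, y)
    model.setOptimizer("Nelder-Mead")   # dispatches to opt.Simplex via the branch above
    model.optimize()                    # minimizes the negative log marginal likelihood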

@@ -686,7 +682,7 @@ def plot(self,x1,x2,t1,t2,axisvals=None):
'''
Plot 2d GP Classification result.
        For plotting, we superimpose the data points with the posterior equi-probability contour
lines for the probability of class two given complete information about the generating mechanism.
:param x1: inputs for class +1
@@ -813,7 +809,7 @@ def setData(self,x,y):
:param y: training labels in shape (n,1)
Note this method will transform x, y to correct shape
        if x, y is given in 1d array.
'''
        # check whether the number of inputs and labels match
assert x.shape[0] == y.shape[0], "number of inputs and labels does not match"
@@ -907,7 +903,7 @@ def optimizeAndPredict(self, xs):


def createBinaryClass(self, i,j):
        '''
Create dataset x(data) and y(label) which only contains class i and j.
Relabel class i to +1 and class j to -1
@@ -955,7 +951,7 @@ def setData(self, x, y, value_per_axis=5):
when using a uni-distant default inducing points
Note this method will transform x, y to correct shape
        if x, y is given in 1d array.
'''
        # check whether the number of inputs and labels match
assert x.shape[0] == y.shape[0], "number of inputs and labels does not match"
@@ -1164,7 +1160,7 @@ def setOptimizer(self, method, num_restarts=None, min_threshold=None, meanRange=

def plot(self,x1,x2,t1,t2,axisvals=None):
'''Plot 2d GP FITC classification.
        For plotting, we superimpose the data points with the posterior equi-probability contour
lines for the probability of class two given complete information about the generating mechanism.
:param x1: inputs for class +1
@@ -1212,7 +1208,3 @@ def useLikelihood(self,newLik):
raise Exception("Logistic likelihood is currently not implemented.")
else:
raise Exception('Possible lik values are "Logistic".')

81 changes: 70 additions & 11 deletions pyGPs/Core/opt.py
@@ -26,10 +26,12 @@
import numpy as np
from scipy.optimize import fmin_bfgs as bfgs
from scipy.optimize import fmin_cg as cg
from scipy.optimize import fmin as simplex
from pyGPs.Optimization import minimize, scg
from copy import deepcopy
import logging


class Optimizer(object):
def __init__(self, model=None, searchConfig = None):
self.model = model
@@ -87,6 +89,66 @@ def _apply_in_objects(self, hypInArray):
self.model.likfunc.hyp = hypInList[(Lm+Lc):]


class Simplex(Optimizer):
    '''Downhill simplex algorithm by Nelder and Mead'''
    def __init__(self, model, searchConfig = None):
        super(Simplex, self).__init__()
        self.model = model
        self.searchConfig = searchConfig
        self.trailsCounter = 0
        self.errorCounter = 0

    def findMin(self, x, y, numIters = 100):
        meanfunc = self.model.meanfunc
        covfunc = self.model.covfunc
        likfunc = self.model.likfunc
        inffunc = self.model.inffunc
        hypInArray = self._convert_to_array()
        # initialize so that a failed first run cannot leave these names unbound
        optimalHyp = deepcopy(hypInArray)
        funcValue = np.inf
        try:
            opt = simplex(self._nlml, hypInArray, maxiter=numIters, disp=False, full_output=True)
            optimalHyp = deepcopy(opt[0])
            funcValue = opt[1]
            warnFlag = opt[4]
            if warnFlag == 1:
                self.logger.warning("Maximum number of function evaluations made.")
            elif warnFlag == 2:
                self.logger.warning("Maximum number of iterations exceeded.")
        except Exception:
            self.errorCounter += 1
            if not self.searchConfig:
                raise Exception("Cannot learn hyperparameters using Nelder-Mead.")
        self.trailsCounter += 1

        if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange
            if not (self.searchConfig.num_restarts or self.searchConfig.min_threshold):
                raise Exception('Specify at least one of the stop conditions')
            while True:
                self.trailsCounter += 1                 # increase counter
                for i in range(hypInArray.shape[0]):    # random init of hyp
                    hypInArray[i] = np.random.uniform(low=searchRange[i][0], high=searchRange[i][1])
                # keep this restart only if its value beats the optimal min value so far
                try:
                    thisopt = simplex(self._nlml, hypInArray, maxiter=numIters, disp=False, full_output=True)
                    if thisopt[1] < funcValue:
                        funcValue = thisopt[1]
                        optimalHyp = thisopt[0]
                except Exception:
                    self.errorCounter += 1

                if self.searchConfig.num_restarts and self.errorCounter > old_div(self.searchConfig.num_restarts, 2):
                    self.logger.warning("[Simplex] %d out of %d trials failed during optimization", self.errorCounter, self.trailsCounter)
                    raise Exception("Over half of the trials failed for Nelder-Mead")
                if self.searchConfig.num_restarts and self.trailsCounter > self.searchConfig.num_restarts-1:  # exceeded num_restarts
                    self.logger.warning("[Simplex] %d out of %d trials failed during optimization", self.errorCounter, self.trailsCounter)
                    return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:  # reached provided minimum
                    self.logger.warning("[Simplex] %d out of %d trials failed during optimization", self.errorCounter, self.trailsCounter)
                    return optimalHyp, funcValue
        return optimalHyp, funcValue
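
The indices unpacked from opt above follow the full_output return contract of scipy.optimize.fmin: (xopt, fopt, iterations, funcalls, warnflag). A standalone sketch on a made-up objective standing in for the negative log marginal likelihood:

    import numpy as np
    from scipy.optimize import fmin as simplex

    def objective(h):
        # Rosenbrock function: a classic toy objective over a flat parameter array
        return (1 - h[0])**2 + 100 * (h[1] - h[0]**2)**2

    xopt, fopt, iters, funcalls, warnflag = simplex(
        objective, np.array([-1.0, 1.0]), maxiter=200, disp=False, full_output=True)
    # warnflag == 1: maximum function evaluations reached; warnflag == 2: maximum iterations exceeded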


class CG(Optimizer):
    '''Conjugate gradient'''
def __init__(self, model, searchConfig = None):
@@ -113,12 +175,12 @@ def findMin(self, x, y, numIters = 100):
self.logger.warning("Gradient and/or function calls not changing.")
except:
self.errorCounter += 1
            if not self.searchConfig:
raise Exception("Can not learn hyperparamters using conjugate gradient.")
self.trailsCounter += 1

if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange
if not (self.searchConfig.num_restarts or self.searchConfig.min_threshold):
raise Exception('Specify at least one of the stop conditions')
while True:
@@ -127,7 +189,7 @@
hypInArray[i]= np.random.uniform(low=searchRange[i][0], high=searchRange[i][1])
                # keep this restart only if its value beats the optimal min value so far
try:
-                    thisopt = cg(self._nlml, hypInArray, self._dnlml, maxiter=100, disp=False, full_output=True)
+                    thisopt = cg(self._nlml, hypInArray, self._dnlml, maxiter=numIters, disp=False, full_output=True)
if thisopt[1] < funcValue:
funcValue = thisopt[1]
optimalHyp = thisopt[0]
@@ -141,7 +203,7 @@
return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:  # reached provided minimum
self.logger.warning("[CG] %d out of %d trails failed during optimization", self.errorCounter, self.trailsCounter)
                    return optimalHyp, funcValue
return optimalHyp, funcValue


@@ -173,13 +235,13 @@ def findMin(self, x, y, numIters = 100):
self.logger.warning("Gradient and/or function calls not changing.")
except:
self.errorCounter += 1
            if not self.searchConfig:
raise Exception("Can not learn hyperparamters using BFGS.")
self.trailsCounter += 1


if self.searchConfig:
            searchRange = self.searchConfig.meanRange + self.searchConfig.covRange + self.searchConfig.likRange
if not (self.searchConfig.num_restarts or self.searchConfig.min_threshold):
raise Exception('Specify at least one of the stop conditions')
while True:
@@ -188,7 +250,7 @@
hypInArray[i]= np.random.uniform(low=searchRange[i][0], high=searchRange[i][1])
                # keep this restart only if its value beats the optimal min value so far
try:
-                    thisopt = bfgs(self._nlml, hypInArray, self._dnlml, maxiter=100, disp=False, full_output=True)
+                    thisopt = bfgs(self._nlml, hypInArray, self._dnlml, maxiter=numIters, disp=False, full_output=True)
if thisopt[1] < funcValue:
funcValue = thisopt[1]
optimalHyp = thisopt[0]
@@ -316,9 +378,6 @@ def findMin(self, x, y, numIters = 100):
return optimalHyp, funcValue
                if self.searchConfig.min_threshold and funcValue <= self.searchConfig.min_threshold:  # reached provided minimum
self.logger.warning("[SCG] %d out of %d trails failed during optimization" , self.errorCounter, self.trailsCounter)
                    return optimalHyp, funcValue
        return optimalHyp, funcValue

29 changes: 10 additions & 19 deletions pyGPs/Testing/unit_test_opt.py
@@ -16,60 +16,54 @@
import numpy as np
import pyGPs


class OptTests(unittest.TestCase):

def setUp(self):
# fix random seed
np.random.seed(0)

# random data for testing
n = 20 # number of inputs
D = 3 # dimension of inputs
-        self.x = np.random.normal(loc=0.0, scale=1.0, size=(n,D))
+        self.x = np.random.normal(loc=0.0, scale=1.0, size=(n, D))
self.y = np.random.random((n,))
self.model = pyGPs.GPR()
-        nlZ, dnlZ, post = self.model.getPosterior(self.x,self.y)
+        nlZ, dnlZ, post = self.model.getPosterior(self.x, self.y)
self.nlZ_beforeOpt = nlZ

def checkOptimizer(self, optimizer):
# funcValue is the minimal negative-log-marginal-likelihood during optimization,
# and optimalHyp is a flattened numpy array
optimalHyp, funcValue = optimizer.findMin(self.x, self.y)
        self.assertTrue(funcValue < self.nlZ_beforeOpt)
        print("optimal hyperparameters in flattened array:", optimalHyp)

def test_Simplex(self):
print("testing Simplex by Nelder-Mead...")
optimizer = pyGPs.Core.opt.Simplex(self.model)
self.checkOptimizer(optimizer)

def test_CG(self):
print("testing Conjugent gradient...")
optimizer = pyGPs.Core.opt.CG(self.model)
self.checkOptimizer(optimizer)

def test_BFGS(self):
print("testing quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)...")
optimizer = pyGPs.Core.opt.BFGS(self.model)
self.checkOptimizer(optimizer)

def test_SCG(self):
print("testing Scaled conjugent gradient...")
optimizer = pyGPs.Core.opt.SCG(self.model)
self.checkOptimizer(optimizer)

def test_Minimize(self):
print("testing minimize by Carl Rasmussen ...")
optimizer = pyGPs.Core.opt.Minimize(self.model)
self.checkOptimizer(optimizer)

# Test your customized mean function
'''
def test_MyOptimizer(self):
@@ -79,9 +73,6 @@ def test_MyOptimizer(self):
'''


if __name__ == "__main__":
print("Running unit tests...")
unittest.main()
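
The suite can also be driven programmatically instead of via unittest.main(); a sketch, assuming the package layout above leaves the module importable:

    import unittest
    from pyGPs.Testing import unit_test_opt

    # run the optimizer tests, including the new Nelder-Mead case
    suite = unittest.TestLoader().loadTestsFromTestCase(unit_test_opt.OptTests)
    unittest.TextTestRunner(verbosity=2).run(suite)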
