rename ba_init, get_params, rfun (#95)
pgkirsch committed Jul 9, 2021
1 parent 07b6362 commit 57af97d
Showing 5 changed files with 37 additions and 31 deletions.
docs/source/examples/hoburgabbeel_ex6_1_output.txt (4 additions, 4 deletions)
@@ -7,9 +7,9 @@ w**3.4411 = 0.422736 * (u_1)**-2.14843
     + 0.424169 * (u_1)**-2.14784
     + 0.15339 * (u_1)**0.584654
 ISMA fit from params
-1 = (0.994797/w**0.238961) * (u_1)**-0.138389
-    + (0.949519/w**0.0924504) * (u_1)**0.0165798
-    + (0.967646/w**0.115505) * (u_1)**-0.0131876
+1 = (0.992648/w**0.35353) * (u_1)**-0.204093
+    + (0.947302/w**0.0920266) * (u_1)**0.017725
+    + (0.961409/w**0.11673) * (u_1)**-0.011164
 MA RMS Error: 0.0023556
 SMA RMS Error: 2.3856e-05
-ISMA RMS Error: 1.0765e-06
+ISMA RMS Error: 8.0757e-07
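Only the ISMA coefficients and the ISMA RMS error change in this regenerated output; the MA and SMA lines are unchanged context. For orientation, output of this form is produced by the package's top-level fit function. The sketch below mirrors gpfit's documented usage; the data-generation constants and the two-value return signature come from that documentation rather than from this diff, so treat them as assumptions:

# Orientation only: constants and return signature follow gpfit's documented
# example, not anything shown in this commit.
import numpy as np
from gpfit.fit import fit

u = np.logspace(0, np.log10(3), 501)      # independent variable u_1
w = (u**2 + 3) / (u + 1)**2               # convex test function
x, y = np.log(u), np.log(w)               # fitting happens in log space
cstrt, rms_error = fit(x, y, 3, "ISMA")   # K=3 terms, implicit softmax-affine fit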
gpfit/fit.py (21 additions, 15 deletions)
@@ -2,35 +2,41 @@
 from numpy import ones, exp, sqrt, mean, square, hstack
 from .classes import max_affine, softmax_affine, implicit_softmax_affine
 from .levenberg_marquardt import levenberg_marquardt
-from .ba_init import ba_init
+from .initialize import get_initial_parameters
 from .print_fit import print_ISMA, print_SMA, print_MA
 from .constraint_set import FitConstraintSet
 
-ALPHA_INIT = 10
-RFUN = {"ISMA": implicit_softmax_affine,
-        "SMA": softmax_affine,
-        "MA": max_affine}
+ALPHA0 = 10
+CLASSES = {
+    "ISMA": implicit_softmax_affine,
+    "SMA": softmax_affine,
+    "MA": max_affine,
+}
 
 
 # pylint: disable=invalid-name
-def get_params(ftype, K, xdata, ydata):
+def get_parameters(ftype, K, xdata, ydata):
     "Perform least-squares fitting optimization."
-    def rfun(params):
+
+    ydata_col = ydata.reshape(ydata.size, 1)
+    ba = get_initial_parameters(xdata, ydata_col, K).flatten('F')
+
+    def residual(params):
         "A specific residual function."
-        [yhat, drdp] = RFUN[ftype](xdata, params)
+        [yhat, drdp] = CLASSES[ftype](xdata, params)
         r = yhat - ydata
         return r, drdp
 
-    ba = ba_init(xdata, ydata.reshape(ydata.size, 1), K).flatten('F')
-
     if ftype == "ISMA":
-        params, _ = levenberg_marquardt(rfun, hstack((ba, ALPHA_INIT*ones(K))))
+        params, _ = levenberg_marquardt(residual, hstack((ba, ALPHA0*ones(K))))
     elif ftype == "SMA":
-        params, _ = levenberg_marquardt(rfun, hstack((ba, ALPHA_INIT)))
+        params, _ = levenberg_marquardt(residual, hstack((ba, ALPHA0)))
     else:
-        params, _ = levenberg_marquardt(rfun, ba)
+        params, _ = levenberg_marquardt(residual, ba)
 
     return params
 
 
 # pylint: disable=too-many-locals
 # pylint: disable=too-many-branches
 # pylint: disable=import-error
@@ -75,7 +81,7 @@ def fit(xdata, ydata, K, ftype="ISMA"):
         fitdata["lb%d" % i] = exp(min(xdata.T[i]))
         fitdata["ub%d" % i] = exp(max(xdata.T[i]))
 
-    params = get_params(ftype, K, xdata, ydata)
+    params = get_parameters(ftype, K, xdata, ydata)
 
     # A: exponent parameters, B: coefficient parameters
     A = params[[i for i in range(K*(d+1)) if i % (d + 1) != 0]]
@@ -125,7 +131,7 @@ def evaluate(xdata):
             ydata: Dependent variable data in 1D numpy array
         """
         xdata = xdata.reshape(xdata.size, 1) if xdata.ndim == 1 else xdata.T
-        return RFUN[ftype](xdata, params)[0]
+        return CLASSES[ftype](xdata, params)[0]
 
     # cstrt.evaluate = evaluate
     fitdata["rms_err"] = sqrt(mean(square(evaluate(xdata.T)-ydata)))
gpfit/ba_init.py → gpfit/initialize.py (6 additions, 6 deletions)
@@ -1,11 +1,11 @@
-"Implements ba_init"
+"Implements get_initial_parameters"
 from numpy import ones, hstack, zeros, tile, argmin
 from numpy.linalg import lstsq, matrix_rank
 from numpy.random import permutation as randperm
 
 
 # pylint: disable=too-many-locals
-def ba_init(x, y, K):
+def get_initial_parameters(x, y, K):
     """
     Initializes max-affine fit to data (y, x)
     ensures that initialization has at least K+1 points per partition (i.e.
@@ -49,7 +49,8 @@ def ba_init(x, y, K):
     # loop through each partition, making local fits
     # note we expand partitions that result in singular least squares problems
     # why this way? some points will be shared by multiple partitions, but
-    # resulting max-affine fit will tend to be good. (as opposed to solving least-norm version)
+    # resulting max-affine fit will tend to be good. (as opposed to solving
+    # least-norm version)
     for k in range(K):
         inds = mindistind == k
 
@@ -73,11 +74,10 @@ def ba_init(x, y, K):
             inds[sortdistind[i]] = 1
 
         if options['bverbose']:
-            print("ba_init: Added %s points to partition %s to maintain"
-                  "full rank for local fitting." % (i-iinit, k))
+            print("Initialization: Added %s points to partition %s to "
+                  "maintain full rank for local fitting." % (i-iinit, k))
         # now create the local fit
         b[:, k] = lstsq(X[inds.nonzero()], y[inds.nonzero()], rcond=-1)[0][:, 0]
-        # Rank condition specified to default for python upgrades
 
 
     return b
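As the comments in the hunk above describe, the renamed routine seeds K partitions at random points, assigns each sample to its nearest seed, grows any partition whose local least-squares problem is rank-deficient, and then solves a small affine fit per partition. A stripped-down sketch of that idea, with a hypothetical helper name and without the rank-repair and verbosity handling of the real code:

import numpy as np

def simple_initialization(x, y, K, seed=0):
    "Toy analogue of get_initial_parameters: nearest-seed partitions, local affine fits."
    # x: (npt, dimx) inputs, y: (npt, 1) column of targets, as in the real routine.
    rng = np.random.default_rng(seed)
    npt, dimx = x.shape
    seeds = x[rng.permutation(npt)[:K]]                    # K random seed points
    dists = ((x[:, None, :] - seeds[None, :, :])**2).sum(axis=2)
    owner = dists.argmin(axis=1)                           # nearest seed per sample
    X = np.hstack([np.ones((npt, 1)), x])                  # affine design matrix [1, x]
    b = np.zeros((dimx + 1, K))
    for k in range(K):
        inds = owner == k
        # unlike the real code, no expansion of rank-deficient partitions
        b[:, k] = np.linalg.lstsq(X[inds], y[inds, 0], rcond=None)[0]
    return b

The (dimx + 1, K) shape of b matches the (3, 4) shape asserted for the two-dimensional, K=4 test case below.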
gpfit/tests/run_tests.py (2 additions, 2 deletions)
@@ -8,8 +8,8 @@
 from gpfit.tests import t_logsumexp
 TESTS += t_logsumexp.TESTS
 
-from gpfit.tests import t_ba_init
-TESTS += t_ba_init.TESTS
+from gpfit.tests import t_initialize
+TESTS += t_initialize.TESTS
 
 from gpfit.tests import t_classes
 TESTS += t_classes.TESTS
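Because run_tests.py only concatenates each module's TESTS list, the renamed test module can also be exercised on its own with the standard unittest machinery. The snippet below is an assumed invocation based on the unittest import in the tests; this commit does not show how run_tests consumes TESTS:

import unittest

# Load and run just the renamed module's tests (assumed invocation).
suite = unittest.defaultTestLoader.loadTestsFromName("gpfit.tests.t_initialize")
unittest.TextTestRunner(verbosity=2).run(suite)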
gpfit/tests/t_ba_init.py → gpfit/tests/t_initialize.py (4 additions, 4 deletions)
@@ -1,9 +1,9 @@
-"unit tests for ba_init function"
+"unit tests for get_initial_parameters function"
 import unittest
 import numpy as np
 from numpy import arange, newaxis, vstack, log, exp
 from numpy.random import random_sample
-from gpfit.ba_init import ba_init
+from gpfit.initialize import get_initial_parameters
 
 SEED = 33404
 
@@ -16,7 +16,7 @@ class TestMaxAffineInitK2(unittest.TestCase):
     x = arange(0., 16.)[:, newaxis]
     y = arange(0., 16.)[:, newaxis]
     K = 2
-    ba = ba_init(x, y, K)
+    ba = get_initial_parameters(x, y, K)
 
     def test_ba_ndim_k2(self):
         self.assertEqual(self.ba.ndim, 2)
@@ -43,7 +43,7 @@ class TestMaxAffineInitK4(unittest.TestCase):
     y = y.reshape(y.size, 1)
     K = 4
 
-    ba = ba_init(x, y, K)
+    ba = get_initial_parameters(x, y, K)
 
     def test_ba_shape_k4(self):
         self.assertEqual(self.ba.shape, (3, 4))
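The asserted (3, 4) shape reflects the return convention visible in these tests: for d input dimensions and K affine terms, get_initial_parameters returns a (d+1, K) array whose columns each hold one intercept/slope set. A quick check mirroring the one-dimensional K=2 case above; the expected (2, 2) shape is inferred from that convention, not re-run here:

from numpy import arange, newaxis
from gpfit.initialize import get_initial_parameters

x = arange(0., 16.)[:, newaxis]    # 16 samples of one input dimension
y = arange(0., 16.)[:, newaxis]    # column-vector targets
ba = get_initial_parameters(x, y, 2)
print(ba.shape)                    # expected (2, 2): one (b, a) column per term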
