Provisional minimax
jeffrey-hokanson committed Aug 14, 2020
1 parent 0755e14 commit b91a192
Showing 3 changed files with 151 additions and 2 deletions.
71 changes: 71 additions & 0 deletions polyrat/rational_ratio.py
@@ -5,6 +5,7 @@

import numpy as np
import scipy.optimize
from scipy.optimize import LinearConstraint, NonlinearConstraint, Bounds

################################################################################
# Residual and jacobian
@@ -55,9 +56,79 @@ def _rational_jacobian_complex(x, P, Q):
    JRI[0::2,1::2] = -J.imag
    JRI[1::2,0::2] = J.imag
    return JRI
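For context, the interleaving convention used throughout these helpers: a complex Jacobian J acts on vectors stored as interleaved real/imaginary pairs through the blocks Re(J), -Im(J), Im(J), Re(J). A quick sanity check of that correspondence (a standalone sketch with illustrative names, not part of the package):

# Check: the interleaved real Jacobian reproduces the complex matrix action.
import numpy as np
rng = np.random.default_rng(0)
J = rng.standard_normal((3, 2)) + 1j*rng.standard_normal((3, 2))
v = rng.standard_normal(2) + 1j*rng.standard_normal(2)
JRI = np.zeros((6, 4))
JRI[0::2, 0::2] = J.real
JRI[0::2, 1::2] = -J.imag
JRI[1::2, 0::2] = J.imag
JRI[1::2, 1::2] = J.real
w = JRI @ v.view(float)   # acts on the interleaved vector [Re, Im, Re, Im]
assert np.allclose(w.view(complex), J @ v)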


# Objective used inside the nonlinear constraint: the squared absolute
# value of the complex residual. Since _rational_residual_complex
# interleaves real and imaginary parts, |f_j|^2 = r[2j]**2 + r[2j+1]**2.
def _rational_residual_squared_abs_complex(x, P, Q, y):
    r = _rational_residual_complex(x, P, Q, y)
    return r[::2]**2 + r[1::2]**2

def _rational_jacobian_squared_abs_complex(x, P, Q, y):
    r = _rational_residual_complex(x, P, Q, y)
    J = _rational_jacobian_complex(x, P, Q)
    # Chain rule: d|f_j|^2/dx = 2*Re(f_j)*dRe(f_j)/dx + 2*Im(f_j)*dIm(f_j)/dx
    Jt = np.zeros((r.shape[0]//2, x.shape[0]))
    Jt += 2*np.diag(r[0::2]) @ J[0::2,:]
    Jt += 2*np.diag(r[1::2]) @ J[1::2,:]
    return Jt

def _rational_ratio_inf_complex(y, P, Q, a0, b0):
    r"""Solve the minimax fitting problem via an epigraph reformulation:

        min_{x, t}  0.5 * t**2
        s.t.        Re[f_j(x)]**2 + Im[f_j(x)]**2 - t**2 <= 0

    where f_j(x) is the j-th entry of the residual (P @ a)/(Q @ b) - y
    and x stacks the real and imaginary parts of a and b.
    """

    # Introduce a slack variable t to represent the maximum
    fun = lambda xt: 0.5*xt[-1]**2

    def grad(xt):
        g = np.zeros(xt.shape)
        g[-1] = xt[-1]
        return g

    def hess(xt):
        H = np.zeros((len(xt), len(xt)))
        H[-1,-1] = 1
        return H

    def con(xt):
        x = xt[:-1]
        t = xt[-1]
        return _rational_residual_squared_abs_complex(x, P, Q, y) - t**2

    def con_jac(xt):
        x = xt[:-1]
        t = xt[-1]
        J = _rational_jacobian_squared_abs_complex(x, P, Q, y)
        # Last column is the derivative with respect to t: d(-t**2)/dt = -2t
        Jt = np.hstack([J, -2*t*np.ones((J.shape[0], 1))])
        return Jt

    lb = -np.inf*np.ones(P.shape[0])
    ub = np.zeros(P.shape[0])
    constraint = NonlinearConstraint(con, lb, ub, jac = con_jac)

    # Initialize the slack variable at the current maximum absolute residual
    t0 = np.max(np.abs((P @ a0)/(Q @ b0) - y))
    xt0 = np.hstack([a0.view(float), b0.view(float), t0])

    # Only the slack variable is bounded below (t >= 0)
    lb_var = -np.inf*np.ones(xt0.shape)
    lb_var[-1] = 0
    ub_var = np.inf*np.ones(xt0.shape)
    print("t0", t0)

    # TODO: Replace with a call to SLP.
    # Note: COBYLA ignores the gradient, bounds, and the options below;
    # SLSQP uses all three, so it is used here instead.
    res = scipy.optimize.minimize(fun, xt0, jac = grad, constraints = constraint,
        method = 'SLSQP', bounds = Bounds(lb_var, ub_var),
        options = {'iprint': 10, 'disp': True})
    xt = res.x
    x = xt[:-1]
    a = x[:2*P.shape[1]].view(complex)
    b = x[-2*Q.shape[1]:].view(complex)
    print("t", xt[-1])
    print("con", np.max(con(xt)))
    return a, b
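Why the epigraph reformulation recovers the sup-norm: the constraints force t >= max_j |f_j(x)|, and the objective is increasing in t, so at optimality the slack equals the largest absolute residual. A quick numerical check of that correspondence (an illustrative, standalone snippet):

# The smallest feasible slack equals the sup-norm of the residual.
import numpy as np
rng = np.random.default_rng(0)
f = rng.standard_normal(10) + 1j*rng.standard_normal(10)   # stand-in residual f(x)
t_min = np.sqrt(np.max(f.real**2 + f.imag**2))             # smallest t with |f_j|^2 <= t^2
assert np.isclose(t_min, np.max(np.abs(f)))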

def rational_ratio_optimize(y, P, Q, a0, b0, norm = 2):
r""" Find a locally optimal rational approximation in ratio form
27 changes: 27 additions & 0 deletions polyrat/slp.py
@@ -0,0 +1,27 @@
""" Sequential linear programing approach for minimax optimization
"""

import numpy as np
from iterprinter import IterationPrinter


def slp(obj, x0, jac, constraints, verbose = True):
    r"""Solves

        min_x \| res(x) \|_\infty

    by sequentially linearizing the residual about the current iterate,

        res(x + p)[j] \approx res(x)[j] + jac(x)[j] @ p,

    and solving the resulting linear program for the step p.
    """

    if verbose:
        iterprinter = IterationPrinter(it = '4d')
        iterprinter.print_header(it = 'iter')



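To flesh out the TODO in rational_ratio.py, each SLP iteration would solve the linearized minimax subproblem as a linear program. A minimal sketch of one such step for a real-valued residual, assuming scipy.optimize.linprog and a simple box trust region (slp_step, res0, J0, and delta are illustrative names, not part of polyrat):

import numpy as np
from scipy.optimize import linprog

def slp_step(res0, J0, delta):
    # One linearized minimax step: min_{p, t} t  s.t. |res0 + J0 @ p| <= t,
    # with a box trust region |p_i| <= delta keeping the linear model valid.
    M, n = J0.shape
    c = np.zeros(n + 1)       # variables z = [p, t]; minimize t
    c[-1] = 1.0
    # res0 + J0 @ p <= t  and  -(res0 + J0 @ p) <= t  as A_ub @ z <= b_ub
    A_ub = np.block([[J0, -np.ones((M, 1))], [-J0, -np.ones((M, 1))]])
    b_ub = np.concatenate([-res0, res0])
    bounds = [(-delta, delta)]*n + [(0, None)]
    sol = linprog(c, A_ub = A_ub, b_ub = b_ub, bounds = bounds)
    return sol.x[:-1]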
55 changes: 53 additions & 2 deletions tests/test_rational_ratio.py
@@ -2,6 +2,8 @@
from polyrat import *
from polyrat.rational_ratio import _rational_residual_real, _rational_jacobian_real
from polyrat.rational_ratio import _rational_residual_complex, _rational_jacobian_complex
from polyrat.rational_ratio import _rational_residual_squared_abs_complex, _rational_jacobian_squared_abs_complex
from polyrat.rational_ratio import _rational_ratio_inf_complex

from .checkjac import *
from .test_data import *
@@ -34,7 +36,6 @@ def test_rational_jacobian_real_arnoldi():
print(f"maximum error {err:8.2e}")
assert err < 1e-8, "Error in the Jacobian"


def test_rational_jacobian_complex_arnoldi():
    M = 1000
    X, y = absolute_value(M, complex_ = True)
@@ -90,7 +91,57 @@ def test_rational_jacobian(M, dim, num_degree, denom_degree, complex_, seed):



def test_rational_residual_squared_abs_complex():
    M = 1000
    dim = 2
    num_degree = 3
    denom_degree = 4

    X, y = random_data(M, dim, complex_ = True, seed = 0)

    P = LegendrePolynomialBasis(X, num_degree).basis()
    Q = LegendrePolynomialBasis(X, denom_degree).basis()

    x = np.random.randn(2*(P.shape[1]+Q.shape[1]))

    res = lambda x: _rational_residual_squared_abs_complex(x, P, Q, y)
    jac = lambda x: _rational_jacobian_squared_abs_complex(x, P, Q, y)

    err = check_jacobian(x, res, jac, relative = True)
    assert err < 1e-7


def test_rational_ratio_inf_complex():
    M = 1000
    dim = 1
    complex_ = True
    seed = 0
    num_degree = 20
    denom_degree = 20

    X, y = random_data(M, dim, complex_, seed)

    sk = SKRationalApproximation(num_degree, denom_degree, refine = False, maxiter = 5, rebase = True)
    sk.fit(X, y)
    print(sk.a)
    print(sk.b)

    # Compute the error for the SK iteration
    err_old = np.max(np.abs(sk(X) - y))

    a, b = _rational_ratio_inf_complex(y, sk.P, sk.Q, sk.a, sk.b)
    print(a)
    print(b)
    # This should have improved the fit
    err = np.max(np.abs((sk.P @ a)/(sk.Q @ b) - y))

    print(f"old error {err_old:8.2e}")
    print(f"new error {err:8.2e}")
    assert err < err_old, "Optimization should have improved the solution"


if __name__ == '__main__':
    test_rational_jacobian(1000, 2, 4, 5, True, 0)
    test_rational_ratio_inf_complex()
