3 changes: 1 addition & 2 deletions examples/snopt_bugfix.py
@@ -23,8 +23,7 @@ def objfunc(xdict):


def sens(xdict, funcs):
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
"""
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
x = xdict["x"]
y = xdict["y"]
funcsSens = {}
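For reference, the objective named in this docstring has analytic partial derivatives df/dx = 2(x - 3) + y and df/dy = x + 2(y + 4). A minimal sketch of a sens callback returning them, assuming the usual pyoptsparse (funcsSens, fail) return convention and an objective key of "obj" (both are assumptions here, not shown in the diff):

def sens(xdict, funcs):
    """f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
    x = xdict["x"]
    y = xdict["y"]
    # Analytic partial derivatives of f with respect to each design variable
    funcsSens = {
        "obj": {
            "x": 2 * (x - 3) + y,  # df/dx
            "y": x + 2 * (y + 4),  # df/dy
        }
    }
    fail = False
    return funcsSens, fail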
12 changes: 6 additions & 6 deletions pyoptsparse/postprocessing/OptView_baseclass.py
@@ -253,10 +253,10 @@ def DetermineMajorIterations(self, db, OpenMDAO):
self.iter_type[:] = 1.0

def SaveDBData(self, db, data_all, data_major, OpenMDAO, data_str):
""" Method to save the information within the database corresponding
to a certain key to the relevant dictionaries within the Display
object. This method is called twice, once for the design variables
and the other for the outputs. """
"""Method to save the information within the database corresponding
to a certain key to the relevant dictionaries within the Display
object. This method is called twice, once for the design variables
and the other for the outputs."""

# Loop over each optimization iteration
for i, iter_type in enumerate(self.iter_type):
@@ -297,8 +297,8 @@ def SaveDBData(self, db, data_all, data_major, OpenMDAO, data_str):
data_major[new_key].append(data)

def SaveOpenMDAOData(self, db):
""" Examine the OpenMDAO dict and save tags if the variables are
objectives (o), constraints (c), or design variables (dv). """
"""Examine the OpenMDAO dict and save tags if the variables are
objectives (o), constraints (c), or design variables (dv)."""

# Loop over each key in the metadata db
for tag in db:
12 changes: 6 additions & 6 deletions pyoptsparse/postprocessing/OptView_dash.py
@@ -250,10 +250,10 @@ def DetermineMajorIterations(self, db, OpenMDAO):
self.iter_type[:] = 1.0

def SaveDBData(self, db, data_all, data_major, OpenMDAO, data_str):
""" Method to save the information within the database corresponding
to a certain key to the relevant dictionaries within the Display
object. This method is called twice, once for the design variables
and the other for the outputs. """
"""Method to save the information within the database corresponding
to a certain key to the relevant dictionaries within the Display
object. This method is called twice, once for the design variables
and the other for the outputs."""

# Loop over each optimization iteration
for i, iter_type in enumerate(self.iter_type):
@@ -294,8 +294,8 @@ def SaveDBData(self, db, data_all, data_major, OpenMDAO, data_str):
data_major[new_key].append(data)

def SaveOpenMDAOData(self, db):
""" Examine the OpenMDAO dict and save tags if the variables are
objectives (o), constraints (c), or design variables (dv). """
"""Examine the OpenMDAO dict and save tags if the variables are
objectives (o), constraints (c), or design variables (dv)."""

# Loop over each key in the metadata db
for tag in db:
2 changes: 1 addition & 1 deletion pyoptsparse/pyCONMIN/pyCONMIN.py
@@ -110,7 +110,7 @@ def __call__(
storeSens : bool
Flag sepcifying if sensitivities are to be stored in hist.
This is necessay for hot-starting only.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
2 changes: 1 addition & 1 deletion pyoptsparse/pyIPOPT/pyIPOPT.py
@@ -413,7 +413,7 @@ def __call__(
storeSens : bool
Flag sepcifying if sensitivities are to be stored in hist.
This is necessay for hot-starting only.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
2 changes: 1 addition & 1 deletion pyoptsparse/pyNSGA2/pyNSGA2.py
@@ -87,7 +87,7 @@ def __call__(self, optProb, storeHistory=None, hotStart=None, **kwargs):
-----
The kwargs are there such that the sens= argument can be
supplied (but ignored here in nsga2)
"""
"""

# ======================================================================
# NSGA-II - Objective/Constraint Values Function
5 changes: 4 additions & 1 deletion pyoptsparse/pyOpt_constraint.py
@@ -345,7 +345,10 @@ def finalize(self, variables, dvOffset, index):
dvGroup, self.name
)
+ "Expecting a Jacobian of size ({}, {}) but received a Jacobian of size ({}, {}).".format(
self.ncon, ndvs, self.jac[dvGroup]["shape"][0], self.jac[dvGroup]["shape"][1],
self.ncon,
ndvs,
self.jac[dvGroup]["shape"][0],
self.jac[dvGroup]["shape"][1],
)
)
)
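To illustrate the shape check being reflowed above: the Jacobian block supplied for a constraint group with respect to a DV group must have shape (ncon, ndvs). A small sketch, where the group names "con" and "x" are placeholders rather than names from this PR:

import numpy as np

ncon, ndvs = 2, 3
# Correct: one row per constraint in the group, one column per design variable in the group
funcsSens = {"con": {"x": np.zeros((ncon, ndvs))}}
# Supplying an (ndvs, ncon) array here instead would trigger the
# "Expecting a Jacobian of size (...) but received a Jacobian of size (...)" error above.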
2 changes: 1 addition & 1 deletion pyoptsparse/pyOpt_gradient.py
@@ -98,7 +98,7 @@ def __call__(self, x, funcs):

fail : bool
Flag for failure. It currently always returns False
"""
"""

# Since this is *very* dumb loop over all the design
# variables, it is easier to just loop over the x values as an
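The comment about the "very dumb loop over all the design variables" refers to perturbing each entry of x in turn to build a finite-difference gradient. A rough, self-contained sketch of that idea; the step size and callback signature here are chosen for illustration only, not taken from pyOpt_gradient.py:

import numpy as np

def fd_gradient(fun, x, h=1e-6):
    """Forward-difference gradient of a scalar function fun at the point x."""
    x = np.asarray(x, dtype=float)
    f0 = fun(x)
    g = np.zeros_like(x)
    for i in range(x.size):
        xp = x.copy()
        xp[i] += h           # perturb one design variable at a time
        g[i] = (fun(xp) - f0) / h
    return g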
10 changes: 5 additions & 5 deletions pyoptsparse/pyOpt_optimization.py
@@ -109,7 +109,7 @@ def checkVarName(self, varName):
validName : str
A valid variable name. May be the same as varName it that
was, in fact, a valid name.
"""
"""
if varName not in self.variables:
return varName
else:
@@ -137,7 +137,7 @@ def checkConName(self, conName):
validName : str
A valid constraint name. May be the same as conName it that
was, in fact, a valid name.
"""
"""
if conName not in self.constraints:
return conName
else:
@@ -348,7 +348,7 @@ def delVar(self, name):
----------
name : str
Name of variable or variable group to remove
"""
"""
try:
self.variables.pop(name)
except KeyError:
@@ -924,7 +924,7 @@ def getOrdering(self, conOrder, oneSided, noEquality=False):
Flag to split equality constraints into two inequality
constraints. Some optimizers (CONMIN for example) can't do
equality constraints explicitly.
"""
"""

# Now for the fun part determine what *actual* order the
# constraints need to be in: We recognize the following
@@ -1312,7 +1312,7 @@ def evaluateLinearConstraints(self, x, fcon):
fcon : dict
Dictionary of the constraints. The linear constraints are
to be added to this dictionary.
"""
"""

# This is actually pretty easy; it's just a matvec with the
# proper linearJacobian entry we've already computed
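The "just a matvec" comment above means each linear constraint value comes from applying a precomputed Jacobian to the flattened design vector. A self-contained numpy sketch of that idea; the array values and the "lin_con" key are illustrative, not pyoptsparse internals:

import numpy as np

# Precomputed linear Jacobian for a 2-constraint, 3-DV group (illustrative values)
jac_linear = np.array([[1.0, 0.0, 2.0],
                       [0.0, 3.0, -1.0]])
x = np.array([0.5, 1.0, 2.0])  # flattened design vector

fcon = {}
fcon["lin_con"] = jac_linear.dot(x)  # the matvec the comment refers to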
10 changes: 5 additions & 5 deletions pyoptsparse/pyOpt_optimizer.py
@@ -191,7 +191,7 @@ def _masterFunc(self, x, evaluate):
This list contains at least one of 'fobj', 'fcon', 'gobj'
or 'gcon'. This list tells this function which of the
values is required on return
"""
"""

# We are hot starting, we should be able to read the required
# information out of the hot start file, process it and then
@@ -814,7 +814,7 @@ def setOption(self, name, value=None):
Name of the option to set
value : varies
Variable value to set.
"""
"""

if name in self.options["defaults"]:
if type(value) == self.options["defaults"][name][0]:
@@ -850,7 +850,7 @@ def getOption(self, name):
-------
value : varies
value of option for 'name'
"""
"""

if name in self.options["defaults"]:
return self.options[name][1]
@@ -874,7 +874,7 @@ def getInform(self, infocode=None):
----------
infocode : int
Integer information code
"""
"""

if infocode is None:
return self.informs
@@ -906,7 +906,7 @@ def OPT(optName, *args, **kwargs):
-------
opt : pyOpt_optimizer inherited optimizer
The desired optimizer
"""
"""

optName = optName.lower()
optList = ["snopt", "ipopt", "slsqp", "nlpqlp", "conmin", "nsga2", "psqp", "alpso", "paropt"]
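The OPT factory above maps a case-insensitive optimizer name to the corresponding wrapper class. A hedged usage sketch, where optProb and sens are assumed to be an already-built Optimization problem and gradient callback rather than objects defined in this PR:

from pyoptsparse import OPT

opt = OPT("slsqp")             # name is lower-cased and checked against optList
sol = opt(optProb, sens=sens)  # run the optimization; optProb and sens assumed defined
print(sol)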
8 changes: 4 additions & 4 deletions pyoptsparse/pyOpt_utils.py
@@ -212,7 +212,7 @@ def convertToCOO(mat):
-------
newMat : dict
A coo representation of the same matrix
"""
"""

if isinstance(mat, dict):
if "coo" in mat:
@@ -396,7 +396,7 @@ def convertToDense(mat):


def scaleColumns(mat, factor):
""" d=
"""d=
Scale the columns of the matrix. Must be CSR format
"""
if not isinstance(mat, dict):
@@ -443,7 +443,7 @@ def extractRows(mat, indices):
-------
newMat : dic
pyoptsparse CSR matrix
"""
"""
rowp = mat["csr"][IROWP]
cols = mat["csr"][ICOLIND]
data = mat["csr"][IDATA]
@@ -482,7 +482,7 @@ def _denseToCOO(arr):
-------
dict : mat
The pyoptsparse representation of a sparse matrix
"""
"""
nRows = arr.shape[0]
nCols = arr.shape[1]
data = arr.flatten()
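For context on _denseToCOO above: the pyoptsparse sparse-matrix dicts that convertToCOO checks for keep row indices, column indices, and values together. A small numpy sketch of turning a dense array into that kind of COO triplet; the exact dict keys are assumed from the "coo" check earlier in the file, not confirmed by this diff:

import numpy as np

arr = np.array([[1.0, 0.0],
                [0.0, 2.0]])
nRows, nCols = arr.shape
rows = np.repeat(np.arange(nRows), nCols)  # row index of every entry
cols = np.tile(np.arange(nCols), nRows)    # column index of every entry
mat = {"coo": [rows, cols, arr.flatten()], "shape": [nRows, nCols]}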
2 changes: 1 addition & 1 deletion pyoptsparse/pyPSQP/pyPSQP.py
@@ -126,7 +126,7 @@ def __call__(
storeSens : bool
Flag sepcifying if sensitivities are to be stored in hist.
This is necessay for hot-starting only.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
2 changes: 1 addition & 1 deletion pyoptsparse/pyParOpt/ParOpt.py
@@ -122,7 +122,7 @@ def __call__(
storeSens : bool
Flag sepcifying if sensitivities are to be stored in hist.
This is necessay for hot-starting only.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
2 changes: 1 addition & 1 deletion pyoptsparse/pySLSQP/pySLSQP.py
@@ -120,7 +120,7 @@ def __call__(
storeSens : bool
Flag sepcifying if sensitivities are to be stored in hist.
This is necessay for hot-starting only.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
2 changes: 1 addition & 1 deletion pyoptsparse/pySNOPT/pySNOPT.py
@@ -303,7 +303,7 @@ def __call__(
Must be in seconds. This can be useful on queue systems when
you want an optimization to cleanly finish before the
job runs out of time.
"""
"""

self.callCounter = 0
self.storeSens = storeSens
5 changes: 4 additions & 1 deletion setup.py
@@ -54,7 +54,10 @@ def configuration(parent_package="", top_path=None):

import re

__version__ = re.findall(r"""__version__ = ["']+([0-9\.]*)["']+""", open("pyoptsparse/__init__.py").read(),)[0]
__version__ = re.findall(
r"""__version__ = ["']+([0-9\.]*)["']+""",
open("pyoptsparse/__init__.py").read(),
)[0]

setup(
name="pyoptsparse",
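The reflowed re.findall call above extracts the version string from pyoptsparse/__init__.py. A quick illustration of the same pattern applied to a sample string (the "2.1.0" value is made up for the example):

import re

sample = '__version__ = "2.1.0"\n'
version = re.findall(r"""__version__ = ["']+([0-9\.]*)["']+""", sample)[0]
print(version)  # -> 2.1.0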
30 changes: 26 additions & 4 deletions test/test_optProb.py
@@ -57,7 +57,13 @@ def setup_optProb(self, nObj=1, nDV=[4], nCon=[2], xScale=[1.0], objScale=[1.0],
dvName = "x{}".format(iDV)
self.x0[dvName] = x0
self.optProb.addVarGroup(
dvName, n, lower=lower, upper=upper, value=x0, scale=xScale[iDV], offset=offset[iDV],
dvName,
n,
lower=lower,
upper=upper,
value=x0,
scale=xScale[iDV],
offset=offset[iDV],
)

# Constraints
@@ -66,7 +72,11 @@ def setup_optProb(self, nObj=1, nDV=[4], nCon=[2], xScale=[1.0], objScale=[1.0],
lower = np.random.uniform(-5, 2, nc)
upper = np.random.uniform(5, 6, nc)
self.optProb.addConGroup(
"con_{}".format(iCon), nc, lower=lower, upper=upper, scale=conScale[iCon],
"con_{}".format(iCon),
nc,
lower=lower,
upper=upper,
scale=conScale[iCon],
)

# Objective
@@ -86,7 +96,13 @@ def test_setDV_getDV(self):
We just test that setDV and getDV work, even with scaling
"""
self.setup_optProb(
nObj=1, nDV=[4, 8], nCon=[2, 3], xScale=[4, 1], objScale=[0.3], conScale=[0.1, 8], offset=[3, 7],
nObj=1,
nDV=[4, 8],
nCon=[2, 3],
xScale=[4, 1],
objScale=[0.3],
conScale=[0.1, 8],
offset=[3, 7],
)
# test getDV first
x0 = self.optProb.getDVs()
@@ -102,7 +118,13 @@ def test_setDV_VarGroup(self):
Test that setDV works with a subset of VarGroups
"""
self.setup_optProb(
nObj=1, nDV=[4, 8], nCon=[2, 3], xScale=[4, 1], objScale=[0.3], conScale=[0.1, 8], offset=[3, 7],
nObj=1,
nDV=[4, 8],
nCon=[2, 3],
xScale=[4, 1],
objScale=[0.3],
conScale=[0.1, 8],
offset=[3, 7],
)
oldDV = self.optProb.getDVs()
# set values for only one VarGroup
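The setup_optProb helper above builds a randomized problem with scaled and offset variable and constraint groups. A hedged, minimal version of that pattern; objfunc is assumed to be a valid pyoptsparse objective callback, and forwarding scale to addObj is assumed rather than shown in this diff:

import numpy as np
from pyoptsparse import Optimization

optProb = Optimization("example", objfunc)  # objfunc assumed defined elsewhere
optProb.addVarGroup("x0", 4, lower=-10, upper=10, value=0.0, scale=4.0, offset=3.0)
optProb.addConGroup("con_0", 2, lower=np.zeros(2), upper=np.ones(2), scale=0.1)
optProb.addObj("obj_0", scale=0.3)  # scale kwarg assumed to be accepted by the objective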
3 changes: 1 addition & 2 deletions test/test_snopt_bugfix.py
@@ -53,8 +53,7 @@ def objfunc_2con(xdict):


def sens(xdict, funcs):
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
"""
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
x = xdict["x"]
y = xdict["y"]
funcsSens = {}
3 changes: 1 addition & 2 deletions test/test_snopt_user_termination.py
@@ -37,8 +37,7 @@ def objfunc(self, xdict):
return funcs, fail

def sens(self, xdict, funcs):
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3
"""
"""f(x,y) = (x-3)^2 + xy + (y+4)^2 - 3"""
x = xdict["x"]
y = xdict["y"]
funcsSens = {}