keyset speedups
bqpd committed Mar 1, 2020
1 parent 8f020e9 commit 305cae8
Showing 7 changed files with 98 additions and 102 deletions.
4 changes: 2 additions & 2 deletions docs/source/examples/boundschecking_output.txt
@@ -15,8 +15,8 @@ Constraints
mf = rf·V
Fs <= mi

BoundsChecking.D has no lower bound, but would gain it from any of these sets: [(BoundsChecking.Ap, 'lower')]
BoundsChecking.Ap has no lower bound, but would gain it from any of these sets: [(BoundsChecking.nu, 'lower')] or [(BoundsChecking.D, 'lower')]

BoundsChecking.nu has no lower bound, but would gain it from any of these sets: [(BoundsChecking.Ap, 'lower')]

BoundsChecking.Ap has no lower bound, but would gain it from any of these sets: [(BoundsChecking.D, 'lower')] or [(BoundsChecking.nu, 'lower')]
BoundsChecking.D has no lower bound, but would gain it from any of these sets: [(BoundsChecking.Ap, 'lower')]
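These messages come from the conditional bound check built by `gen_meq_bounds` in gpkit/constraints/gp.py (changed below): a variable left unbounded by the posynomial constraints is reported together with the other bounds that, through a monomial equality, would imply it. A hypothetical sketch of that result's shape, inferred from the printout above rather than taken from gpkit's source:

    from collections import defaultdict

    # Hypothetical illustration only: each missing (var, bound) maps to the
    # alternative sets of other bounds that would imply it through a
    # monomial equality, which is roughly what the lines above report.
    meq_bounds = defaultdict(set)
    meq_bounds[("Ap", "lower")].add(frozenset({("D", "lower")}))
    meq_bounds[("Ap", "lower")].add(frozenset({("nu", "lower")}))

    for (var, bound), conditions in meq_bounds.items():
        alternatives = " or ".join(str(sorted(c)) for c in conditions)
        print("%s has no %s bound, but would gain it from any of"
              " these sets: %s" % (var, bound, alternatives))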
11 changes: 5 additions & 6 deletions gpkit/constraints/gp.py
@@ -6,7 +6,6 @@
from ..nomials import NomialData
from ..small_classes import CootMatrix, SolverLog, Numbers, FixedScalar
from ..keydict import KeyDict
from ..small_scripts import mag
from ..solution_array import SolutionArray
from .costed import CostedConstraintSet
from ..exceptions import (InvalidPosynomial, Infeasible, UnknownInfeasible,
@@ -85,11 +84,11 @@ def __init__(self, cost, constraints, substitutions,
self.k = [len(hm) for hm in self.hmaps]
p_idxs = [] # p_idxs [i]: posynomial index of each monomial
self.m_idxs = [] # m_idxs [i]: monomial indices of each posynomial
self.meq_idxs = [] # meq_idxs: first mon-index of each mon equality
self.meq_idxs = set() # meq_idxs: first mon-index of each mon equality
m_idx_start = 0
for i, p_len in enumerate(self.k):
if getattr(self.hmaps[i], "from_meq", False):
self.meq_idxs.append(m_idx_start)
self.meq_idxs.add(m_idx_start)
self.m_idxs.append(list(range(m_idx_start, m_idx_start + p_len)))
p_idxs += [i]*p_len
m_idx_start += p_len
@@ -111,9 +110,9 @@ def check_bounds(self, allow_missingbounds=True):
if upperbound and lowerbound:
break
if not upperbound:
missingbounds[(var, "upper")] = ""
missingbounds[(var, "upper")] = "."
if not lowerbound:
missingbounds[(var, "lower")] = ""
missingbounds[(var, "lower")] = "."
if not missingbounds:
return {}
meq_bounds = gen_meq_bounds(missingbounds, self.exps, self.meq_idxs)
@@ -411,7 +410,7 @@ def _almost_equal(num1, num2):
% (np.exp(dual_cost), cost))


def gen_meq_bounds(missingbounds, exps, meq_idxs): # pylint: disable=too-many-locals
def gen_meq_bounds(missingbounds, exps, meq_idxs): # pylint: disable=too-many-locals,too-many-branches
"Generate conditional monomial equality bounds"
meq_bounds = defaultdict(set)
for i in meq_idxs:
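In the gp.py hunks above, `meq_idxs` changes from a list to a set (`append` becomes `add`), presumably so that lookups against the monomial-equality indices are hash-based and duplicates are impossible, while the `for i in meq_idxs` iteration in `gen_meq_bounds` is unaffected. A minimal sketch of the difference, with made-up index values rather than gpkit's own:

    # Minimal sketch, not gpkit code: hypothetical first-monomial indices.
    meq_idxs_list = []                      # old container
    meq_idxs_set = set()                    # new container
    for m_idx_start in (0, 3, 7):
        meq_idxs_list.append(m_idx_start)   # old: list append
        meq_idxs_set.add(m_idx_start)       # new: set add
    # Both answer membership questions identically, but the set does it
    # with a hash probe instead of a linear scan.
    assert (7 in meq_idxs_list) == (7 in meq_idxs_set) == True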
8 changes: 4 additions & 4 deletions gpkit/constraints/set.py
@@ -40,12 +40,11 @@ def _sort_constrs(item):
# pylint: disable=too-many-instance-attributes
class ConstraintSet(list, GPkitObject):
"Recursive container for ConstraintSets and Inequalities"
varkeys = None
unique_varkeys = frozenset()
# idxlookup holds the names of the top-level constraintsets
idxlookup = None
_name_collision_varkeys = None
varkeys = _name_collision_varkeys = None
idxlookup = None # holds the names of the top-level constraintsets

# @profile
def __init__(self, constraints, substitutions=None): # pylint: disable=too-many-branches
if isinstance(constraints, ConstraintSet):
constraints = [constraints] # put it one level down
@@ -174,6 +173,7 @@ def flat(self, constraintsets=False):
for yielded_constraint in subgenerator:
yield yielded_constraint

# @profile
def reset_varkeys(self):
"Goes through constraints and collects their varkeys."
self.varkeys = KeySet(self.unique_varkeys)
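The set.py change above seeds `self.varkeys` with a `KeySet` built straight from `unique_varkeys`; with KeySet now backed by a real `set` (see gpkit/keydict.py below), seeding and any later `update` calls are plain set operations rather than dict writes. A rough stand-in sketch, using a bare `set` and made-up keys in place of gpkit's KeySet and VarKeys:

    # Rough stand-in with hypothetical keys: seeding and updating is pure
    # hashing, with no per-key dict storage of placeholder values.
    unique_varkeys = frozenset({"x", "y"})   # stand-in for unique VarKeys
    varkeys = set(unique_varkeys)            # ~ KeySet(self.unique_varkeys)
    for child_keys in ({"x", "z"}, {"w"}):   # keys found in sub-constraints
        varkeys.update(child_keys)
    print(sorted(varkeys))                   # ['w', 'x', 'y', 'z']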
1 change: 0 additions & 1 deletion gpkit/constraints/sgp.py
@@ -9,7 +9,6 @@
from ..nomials import PosynomialInequality
from .. import NamedVariables
from .costed import CostedConstraintSet
from ..small_scripts import mag


EPS = 1e-6 # determines what counts as "convergence"
167 changes: 83 additions & 84 deletions gpkit/keydict.py
@@ -24,15 +24,12 @@ def clean_value(key, value):
value = value.to(key.units or "dimensionless").magnitude
return value


class KeyDict(dict):
"""KeyDicts do two things over a dict: map keys and collapse arrays.
>>>> kd = gpkit.keydict.KeyDict()
class KeyMap:
"""Helper class to provide KeyMapping to interfaces.
Mapping keys
------------
If ``.keymapping`` is True, a KeyDict keeps an internal list of VarKeys as
A KeyMap keeps an internal list of VarKeys as
canonical keys, and their values can be accessed with any object whose
`key` attribute matches one of those VarKeys, or with strings matching
any of the multiple possible string interpretations of each key:
@@ -46,55 +43,20 @@ class KeyDict(dict):
Note that if a item is set using a key that does not have a `.key`
attribute, that key can be set and accessed normally.
Collapsing arrays
-----------------
If ``.collapse_arrays`` is True then VarKeys which have a `shape`
parameter (indicating they are part of an array) are stored as numpy
arrays, and automatically de-indexed when a matching VarKey with a
particular `idx` parameter is used as a key.
See also: gpkit/tests/t_keydict.py.
"""
collapse_arrays = True
keymapping = True
collapse_arrays = False
keymap = []
log_gets = False
varkeys = None

def __init__(self, *args, **kwargs):
"Passes through to dict.__init__ via the `update()` method"
# pylint: disable=super-init-not-called
self.varkeys = None
self.keymap = defaultdict(set)
self._unmapped_keys = set()
self.log_gets = False
self.logged_gets = set()
self.owned = set()
self.update(*args, **kwargs)

def get(self, key, alternative=KeyError):
if key not in self:
if alternative is KeyError:
raise alternative(key)
return alternative
return self[key]

def _copyonwrite(self, key):
"Copys arrays before they are written to"
if not hasattr(self, "owned"): # backwards pickle compatibility
self.owned = set()
if key not in self.owned:
dict.__setitem__(self, key, dict.__getitem__(self, key).copy())
self.owned.add(key)

def update(self, *args, **kwargs):
"Iterates through the dictionary created by args and kwargs"
if not self and len(args) == 1 and isinstance(args[0], KeyDict):
dict.update(self, args[0])
self.keymap.update(args[0].keymap)
self._unmapped_keys.update(args[0]._unmapped_keys) # pylint:disable=protected-access
else:
for k, v in dict(*args, **kwargs).items():
self[k] = v
self.logged_gets = set()
self.update(*args, **kwargs) # pylint: disable=no-member

def parse_and_index(self, key):
"Returns key if key had one, and veckey/idx for indexed veckeys."
@@ -137,25 +99,80 @@ def __contains__(self, key): # pylint:disable=too-many-return-statements
return True
if not isinstance(key, Hashable):
return False
if dict.__contains__(self, key):
if super().__contains__(key): # pylint: disable=no-member
if idx:
try:
value = dict.__getitem__(self, key)[idx]
value = super().__getitem__(key)[idx] # pylint: disable=no-member
return True if is_sweepvar(value) else not isnan(value)
except TypeError:
raise TypeError("%s has an idx, but its value in this"
" KeyDict is the scalar %s."
% (key, dict.__getitem__(self, key)))
% (key, super().__getitem__(key))) # pylint: disable=no-member
except IndexError:
raise IndexError("key %s with idx %s is out of bounds"
" for value %s" %
(key, idx,
dict.__getitem__(self, key)))
(key, idx, super().__getitem__(key))) # pylint: disable=no-member
return True
if key in self.keymap:
return True
return False

def update_keymap(self):
"Updates the keymap with the keys in _unmapped_keys"
copied = set() # have to copy bc update leaves duplicate sets
while self._unmapped_keys:
key = self._unmapped_keys.pop()
if hasattr(key, "keys"):
for mapkey in key.keys:
if mapkey not in copied and mapkey in self.keymap:
self.keymap[mapkey] = set(self.keymap[mapkey])
copied.add(mapkey)
self.keymap[mapkey].add(key)


class KeyDict(KeyMap, dict):
"""KeyDicts do two things over a dict: map keys and collapse arrays.
>>>> kd = gpkit.keydict.KeyDict()
For mapping keys, see KeyMapper.__doc__
Collapsing arrays
-----------------
If ``.collapse_arrays`` is True then VarKeys which have a `shape`
parameter (indicating they are part of an array) are stored as numpy
arrays, and automatically de-indexed when a matching VarKey with a
particular `idx` parameter is used as a key.
See also: gpkit/tests/t_keydict.py.
"""
collapse_arrays = True

def get(self, key, alternative=KeyError):
if key not in self:
if alternative is KeyError:
raise alternative(key)
return alternative
return self[key]

def _copyonwrite(self, key):
"Copys arrays before they are written to"
if not hasattr(self, "owned"): # backwards pickle compatibility
self.owned = set()
if key not in self.owned:
super().__setitem__(key, super().__getitem__(key).copy())
self.owned.add(key)

def update(self, *args, **kwargs):
"Iterates through the dictionary created by args and kwargs"
if not self and len(args) == 1 and isinstance(args[0], KeyDict):
super().update(args[0])
self.keymap.update(args[0].keymap)
self._unmapped_keys.update(args[0]._unmapped_keys) # pylint:disable=protected-access
else:
for k, v in dict(*args, **kwargs).items():
self[k] = v

def __call__(self, key):
got = self[key]
# if uniting ever becomes a speed hit, cache the results
@@ -244,18 +261,6 @@ def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self.owned.add(key)

def update_keymap(self):
"Updates the keymap with the keys in _unmapped_keys"
copied = set() # have to copy bc update leaves duplicate sets
while self.keymapping and self._unmapped_keys:
key = self._unmapped_keys.pop()
if hasattr(key, "keys"):
for mapkey in key.keys:
if mapkey not in copied and mapkey in self.keymap:
self.keymap[mapkey] = set(self.keymap[mapkey])
copied.add(mapkey)
self.keymap[mapkey].add(key)

def __delitem__(self, key):
"Overloads del [] to work with all keys"
key, idx = self.parse_and_index(key)
@@ -266,13 +271,13 @@ def __delitem__(self, key):
for k in list(keys):
delete = True
if idx:
dict.__getitem__(self, k)[idx] = np.nan
if not isnan(dict.__getitem__(self, k)).all():
super().__getitem__(k)[idx] = np.nan
if not isnan(super().__getitem__(k)).all():
delete = False
if delete:
dict.__delitem__(self, k)
super().__delitem__(k)
mapkeys = set([k])
if self.keymapping and hasattr(k, "keys"):
if hasattr(k, "keys"):
mapkeys.update(k.keys)
for mapkey in mapkeys:
if mapkey in self.keymap:
@@ -285,33 +290,27 @@
self.keymap[mapkey].remove(k)


class KeySet(KeyDict):
"KeyDicts that don't collapse arrays or store values."
class KeySet(KeyMap, set):
"KeyMaps that don't collapse arrays or store values."
collapse_arrays = False

def add(self, item):
"Adds an item to the keyset"
key, _ = self.parse_and_index(item)
if key not in self.keymap:
self.keymap[key].add(key)
self._unmapped_keys.add(key)
dict.__setitem__(self, key, None)

def update(self, *args, **kwargs):
"Iterates through the dictionary created by args and kwargs"
if len(args) == 1:
arg, = args
if isinstance(arg, KeySet): # assume unmapped
dict.update(self, arg)
if isinstance(arg, KeySet):
set.update(self, arg)
for key, value in arg.keymap.items():
self.keymap[key].update(value)
self._unmapped_keys.update(arg._unmapped_keys) # pylint: disable=protected-access
else: # set-like interface
for item in arg:
self.add(item)
else: # dict-like interface
for k in dict(*args, **kwargs):
self.add(k)
keys = {item.key for item in arg}
for key in keys:
self.keymap[key].add(key)
self._unmapped_keys.update(keys)
super().update(keys)
else: # set-like interface
super().update(set(*args, **kwargs))

def __getitem__(self, key):
"Gets the keys corresponding to a particular key."
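The core of the commit is the split above: the key-mapping machinery moves into a standalone `KeyMap`, `KeyDict` becomes `KeyDict(KeyMap, dict)`, and `KeySet` becomes `KeySet(KeyMap, set)`, so the keyset no longer stores placeholder `None` values or pays dict overhead for what is really just membership tracking. A minimal sketch of the same mixin pattern in plain Python, illustrative only, with simplified string keys and a `register` helper standing in for gpkit's VarKeys and `parse_and_index`:

    from collections import defaultdict

    class MiniKeyMap:
        "Mixin: maps aliases back to canonical keys, shared by dict and set."
        def __init__(self, *args, **kwargs):
            self.keymap = defaultdict(set)
            super().__init__(*args, **kwargs)

        def register(self, key, *aliases):
            "Records a canonical key and the aliases that should find it."
            for name in (key,) + aliases:
                self.keymap[name].add(key)

    class MiniKeyDict(MiniKeyMap, dict):
        "dict whose entries can be read through any registered alias."
        def __getitem__(self, key):
            canonical, = self.keymap.get(key, {key})
            return dict.__getitem__(self, canonical)

    class MiniKeySet(MiniKeyMap, set):
        "set of canonical keys: membership only, no stored values."
        def __contains__(self, key):
            canonicals = self.keymap.get(key, {key})
            return any(set.__contains__(self, k) for k in canonicals)

    kd = MiniKeyDict()
    kd.register("x_velocity", "v")
    kd["x_velocity"] = 3.0
    assert kd["v"] == 3.0               # alias read goes through the keymap

    ks = MiniKeySet()
    ks.register("x_velocity", "v")
    ks.add("x_velocity")
    assert "v" in ks and "x_velocity" in ks

Putting the mapping in a mixin is what lets the set-backed KeySet drop the dict storage it only ever used as a membership marker, which is presumably where the "keyset speedups" of the commit title come from.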
4 changes: 2 additions & 2 deletions gpkit/nomials/math.py
@@ -643,8 +643,8 @@ def as_hmapslt1(self, substitutions=None):
self._negysig = Signomial(negy_hmap, require_positive=False)
self._coeffsigs = {exp: Signomial(hmap, require_positive=False)
for exp, hmap in posy_hmaps.items()}
self._sigvars = {exp: (list(self._negysig.varkeys.keys())
+ list(sig.varkeys.keys()))
self._sigvars = {exp: (list(self._negysig.varkeys)
+ list(sig.varkeys))
for exp, sig in self._coeffsigs.items()}
return p_ineq.as_hmapslt1(substitutions)

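The math.py change follows from that refactor: a set-backed varkeys container yields its keys directly when iterated, so `list(sig.varkeys)` replaces `list(sig.varkeys.keys())`. A trivial sketch of the equivalence, with a plain dict and set standing in for the old and new containers:

    # Trivial sketch: the old dict-backed container exposed keys via .keys();
    # the new set-backed one is its own collection of keys.
    old_style = {"m": None, "g": None}   # keys plus dummy values
    new_style = {"m", "g"}               # just the keys
    assert sorted(old_style.keys()) == sorted(new_style)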
5 changes: 2 additions & 3 deletions gpkit/tests/t_nomials.py
@@ -4,7 +4,6 @@
from gpkit import Variable, Monomial, Posynomial, Signomial, SignomialsEnabled
from gpkit import VectorVariable, NomialArray
from gpkit.nomials import NomialMap
from gpkit.small_classes import HashVector
from gpkit.exceptions import InvalidPosynomial
import gpkit

@@ -294,8 +293,8 @@ def test_init(self):
self.assertEqual(p, sum(ms))
_ = hash(p2)

hmap = NomialMap({HashVector({'m': 1, 'v': 2}): 0.5,
HashVector({'m': 1, 'g': 1, 'h': 1}): 1})
hmap = NomialMap({Monomial({'m': 1, 'v': 2}).exp : 0.5,
Monomial({'m': 1, 'g': 1, 'h': 1}).exp: 1})
hmap.units_of_product(None)
p = Posynomial(hmap)
m, = p.varkeys["m"]
