
Merging

1 parent 3d98b30 commit b773261cddf0100df7676ac1fab1b01f1505b80e @mattloper committed Jul 15, 2014
Showing with 33 additions and 28 deletions.
  1. +7 −8 ch.py
  2. +2 −2 ch_ops.py
  3. +2 −2 extras.py
  4. +6 −1 linalg.py
  5. +3 −3 logic.py
  6. +9 −8 optimization.py
  7. +4 −4 reordering.py
ch.py
@@ -280,12 +280,12 @@ def _compute_dr_wrt_sliced(self, wrt):
try:
jac = wrt.compute_dr_wrt(inner).T
- except:
+ except Exception as e:
import pdb; pdb.set_trace()
-
+
return self._superdot(result, jac)
-
+
@property
def shape(self):
return self.r.shape
@@ -331,7 +331,7 @@ def __setstate__(self, d):
# This restores our unpickleable "_parents" attribute
for k in set(self.dterms).intersection(set(self.__dict__.keys())):
setattr(self, k, self.__dict__[k])
-
+
def __setattr__(self, name, value, itr=None):
#print 'SETTING %s' % (name,)
@@ -471,7 +471,7 @@ def __setitem__(self, key, value, itr=None):
# kids.append(p)
# else:
# parents += [p.__dict__[k] for k in p.dterms]
- # from body.ch.optimization import minimize_dogleg
+ # from ch.optimization import minimize_dogleg
# minimize_dogleg(obj=self.__getitem__(key) - value, free_variables=kids, show_residuals=False)
else:
inner = self
@@ -554,7 +554,7 @@ def _superdot(self, lhs, rhs):
if isinstance(rhs, np.ndarray) and rhs.size==1:
rhs = rhs.ravel()[0]
-
+
if isinstance(lhs, numbers.Number) or isinstance(rhs, numbers.Number):
return lhs * rhs
@@ -740,8 +740,7 @@ def string_for(self, my_name):
result += ['%s [label="%s"];' % (dst, child_label)]
result += string_for(getattr(self, dterm), dterm)
return result
-
-
+
dot_file_contents = 'digraph G {\n%s\n}' % '\n'.join(list(set(string_for(self, 'root'))))
dot_file = tempfile.NamedTemporaryFile()
dot_file.write(dot_file_contents)
ch_ops.py
@@ -47,7 +47,7 @@
import numpy as np
import cPickle as pickle
import scipy.sparse as sp
-from chumpy.utils import row, col
+from utils import row, col
from copy import copy as copy_copy
__all__ += ['pi', 'set_printoptions']
@@ -337,7 +337,7 @@ def on_changed(self, which):
self.dr_cache = {}
def compute_r(self):
- return np.array(np.mean(self.x.r, axis=self.axis))
+ return np.array([np.mean(self.x.r, axis=self.axis)])
def compute_dr_wrt(self, wrt):
if wrt is not self.x:
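Aside (not part of the diff): the one-line change to compute_r above wraps the mean in a list so the result is always at least one-dimensional. A minimal NumPy sketch of the difference, with illustrative variable names:

    import numpy as np

    a = np.arange(6.0).reshape(2, 3)

    old = np.array(np.mean(a, axis=None))    # 0-d array, shape ()
    new = np.array([np.mean(a, axis=None)])  # 1-d array, shape (1,)
    print(old.shape, new.shape)

    # With a concrete axis, the wrap adds a leading length-1 dimension instead:
    print(np.array([np.mean(a, axis=0)]).shape)   # (1, 3) rather than (3,)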
extras.py
@@ -1,8 +1,8 @@
__author__ = 'matt'
-import chumpy as ch
+import ch
import numpy as np
-from .utils import row, col
+from utils import row, col
import scipy.sparse as sp
import scipy.special
linalg.py
@@ -8,7 +8,7 @@
"""
-__all__ = ['inv', 'svd', 'det', 'slogdet', 'pinv', 'lstsq']
+__all__ = ['inv', 'svd', 'det', 'slogdet', 'pinv', 'lstsq', 'norm']
import numpy as np
import scipy.sparse as sp
@@ -24,6 +24,11 @@
__all__.append('tensorinv')
except: pass
+def norm(x, ord=None, axis=None):
+ if ord is not None or axis is not None:
+ raise NotImplementedError("'ord' and 'axis' should be None for now.")
+
+ return ch.sqrt(ch.sum(x**2))
def lstsq(a, b, rcond=-1):
if rcond != -1:
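Aside (not part of the diff): a hedged usage sketch of the norm() helper added above. With ord and axis left at None it is simply the 2-norm (Frobenius norm) over all elements, and since the result is a Ch expression its derivative is available as well. The import path below is an assumption; it depends on how the modules are laid out or installed (released chumpy exposes this file as the chumpy.linalg submodule).

    import numpy as np
    import scipy.sparse as sp
    import chumpy as ch
    import chumpy.linalg as linalg   # assumed layout; adjust if linalg.py is top-level

    x = ch.array([3.0, 4.0])
    n = linalg.norm(x)               # sqrt(3**2 + 4**2) == 5.0
    print(n.r)

    # d norm / d x = x / norm(x) == [0.6, 0.8]
    J = n.dr_wrt(x)
    J = J.toarray() if sp.issparse(J) else np.asarray(J)
    print(J.ravel())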
logic.py
@@ -10,7 +10,7 @@
__all__ = [] # added to incrementally below
import ch
-from .ch import Ch
+from ch import Ch
import numpy as np
class LogicFunc(Ch):
@@ -34,6 +34,6 @@ def compute_dr_wrt(self, wrt):
if __name__ == '__main__':
- import chumpy as ch
+ import ch
print all(np.array([1,2,3]))
- print isinf(np.array([0,2,3]))
+ print isinf(np.array([0,2,3]))
optimization.py
@@ -15,8 +15,9 @@
import numpy as np
from numpy.linalg import norm
-from chumpy import ch
-from chumpy.utils import row, col
+import ch, utils
+from utils import row, col
+
import scipy.sparse as sp
import scipy.sparse
import scipy.optimize
@@ -26,6 +27,7 @@
vstack = lambda x : sp.vstack(x, format='csc') if any([sp.issparse(a) for a in x]) else np.vstack(x)
hstack = lambda x : sp.hstack(x, format='csc') if any([sp.issparse(a) for a in x]) else np.hstack(x)
+
# Nelder-Mead
# Powell
# CG
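Aside (not part of the diff): the vstack/hstack helpers shown in the context above keep the result sparse whenever any input block is sparse, and fall back to dense numpy stacking otherwise. A small sketch of that behaviour:

    import numpy as np
    import scipy.sparse as sp

    vstack = lambda x: sp.vstack(x, format='csc') if any(sp.issparse(a) for a in x) else np.vstack(x)

    dense_only  = [np.eye(2), np.ones((2, 2))]
    with_sparse = [sp.eye(2, format='csc'), np.ones((2, 2))]

    print(type(vstack(dense_only)))    # <class 'numpy.ndarray'>
    print(type(vstack(with_sparse)))   # scipy.sparse CSC matrix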
@@ -52,7 +54,7 @@ def minimize(fun, x0, method='dogleg', bounds=None, constraints=(), tol=None, ca
free_variables = x0
- from .ch import SumOfSquares
+ from ch import SumOfSquares
hessp = None
hess = None
@@ -206,8 +208,8 @@ def on_changed(self, which):
self.free_variables[idx].__setattr__('x', self.x.r[rng], _giter)
#self.free_variables[idx] = self.obj.replace(freevar, Ch(self.x.r[rng].copy()))
pos += sz
-
+
@property
def J(self):
result = self.dr_wrt(self.x).copy()
@@ -272,7 +274,7 @@ def _minimize_dogleg(obj, free_variables, on_step=None,
num_unique_ids = len(np.unique(np.array([id(freevar) for freevar in free_variables])))
if num_unique_ids != len(free_variables):
raise Exception('The "free_variables" param contains duplicate variables.')
-
+
obj = ChInputsStacked(obj=obj, free_variables=free_variables, x=np.concatenate([freevar.r.ravel() for freevar in free_variables]))
def call_cb():
@@ -313,7 +315,7 @@ def call_cb():
tm = time.time()
pif('computing Jacobian...')
J = obj.J
-
+
if sp.issparse(J):
assert(J.nnz > 0)
pif('Jacobian (%dx%d) computed in %.2fs' % (J.shape[0], J.shape[1], time.time() - tm))
@@ -464,8 +466,7 @@ def call_cb():
# the following "collect" is very expensive.
# please contact matt if you find situations where it actually helps things.
- # collect()
-
+ #import gc; gc.collect()
if stop or improvement_occurred or (fevals >= max_fevals):
break
if k >= k_max:
reordering.py
@@ -6,7 +6,7 @@
import ch
import numpy as np
-from chumpy.utils import row, col
+from utils import row, col
import scipy.sparse as sp
import weakref
@@ -163,7 +163,7 @@ class Reshape(Permute):
dterms = 'a',
terms = 'newshape',
term_order= 'a', 'newshape'
-
+
def compute_r(self):
return self.a.r.reshape(self.newshape)
@@ -300,8 +300,8 @@ def on_changed(self, which):
class AtleastNd(ch.Ch):
- dterms = 'x',
- terms = 'ndims',
+ dterms = 'x'
+ terms = 'ndims'
def compute_r(self):
xr = self.x.r
