Merge pull request #3408 from pv/sparse-fixnumpy15-2
MAINT: sparse: Numpy 1.5 compatibility fixes
rgommers committed Feb 27, 2014
2 parents d2f7630 + 1d4ea93 commit c5af4b8
Showing 5 changed files with 96 additions and 54 deletions.
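In short, the sparse matrix code stops calling np.unique and np.bincount directly and instead goes through two version-gated wrappers added to scipy/sparse/sputils.py: _compat_unique (a copy of numpy 1.7.1's unique, since older releases mishandle return_index) and _compat_bincount (which supplies the minlength keyword that np.bincount only gained in numpy 1.6). The snippet below is a condensed sketch of that gating pattern, not the literal diff: the full unique copy is elided, the bincount fallback is shortened, and NumpyVersion is imported from numpy.lib purely to keep the example self-contained (sputils.py uses scipy's own NumpyVersion helper).

    import numpy as np
    from numpy.lib import NumpyVersion

    def _compat_bincount_impl(x, weights=None, minlength=None):
        # Fallback for numpy < 1.6: emulate minlength by zero-padding the counts.
        counts = np.bincount(x) if weights is None else np.bincount(x, weights=weights)
        if minlength is not None and counts.shape[0] < minlength:
            counts = np.r_[counts, np.zeros(minlength - counts.shape[0])]
        return counts

    # Recent numpy already supports minlength, so the public name is just an alias;
    # only old versions fall back to the local implementation above.
    if NumpyVersion(np.__version__) > '1.6.0-dev':
        _compat_bincount = np.bincount
    else:
        _compat_bincount = _compat_bincount_impl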
10 changes: 5 additions & 5 deletions scipy/sparse/compressed.py
@@ -15,7 +15,7 @@
 from . import sparsetools
 from .sputils import upcast, upcast_char, to_native, isdense, isshape, \
         getdtype, isscalarlike, isintlike, IndexMixin, get_index_dtype, \
-        downcast_intp_index, _safe_unique
+        downcast_intp_index, _compat_unique, _compat_bincount


 class _cs_matrix(_data_matrix, _minmax_mixin, IndexMixin):
@@ -103,8 +103,8 @@ def getnnz(self, axis=None):
             axis, _ = self._swap((axis, 1 - axis))
             _, N = self._swap(self.shape)
             if axis == 0:
-                return np.bincount(downcast_intp_index(self.indices),
-                                   minlength=N)
+                return _compat_bincount(downcast_intp_index(self.indices),
+                                        minlength=N)
             elif axis == 1:
                 return np.diff(self.indptr)
             raise ValueError('axis out of bounds')
@@ -727,7 +727,7 @@ def _insert_many(self, i, j, x):
         # Collate old and new in chunks by major index
         indices_parts = []
         data_parts = []
-        ui, ui_indptr = _safe_unique(i, return_index=True)
+        ui, ui_indptr = _compat_unique(i, return_index=True)
         ui_indptr = np.append(ui_indptr, len(j))
         new_nnzs = np.diff(ui_indptr)
         prev = 0
@@ -739,7 +739,7 @@ def _insert_many(self, i, j, x):
             data_parts.append(self.data[start:stop])

             # handle duplicate j: keep last setting
-            uj, uj_indptr = _safe_unique(j[js:je][::-1], return_index=True)
+            uj, uj_indptr = _compat_unique(j[js:je][::-1], return_index=True)
             if len(uj) == je - js:
                 indices_parts.append(j[js:je])
                 data_parts.append(x[js:je])
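Both compressed.py call sites above only swap np.bincount for _compat_bincount; behaviour on current numpy is unchanged. The minlength keyword matters for getnnz because a bare bincount stops at the largest index actually present, while the method needs one count per column (or row). A small plain-numpy illustration with made-up indices:

    import numpy as np

    indices = np.array([0, 2, 2, 5])   # column indices of the stored entries
    N = 8                              # number of columns in the matrix

    print(np.bincount(indices))                # [1 0 2 0 0 1]      -- only 6 entries
    print(np.bincount(indices, minlength=N))   # [1 0 2 0 0 1 0 0]  -- one count per column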
10 changes: 5 additions & 5 deletions scipy/sparse/coo.py
@@ -15,7 +15,7 @@
 from .base import isspmatrix
 from .data import _data_matrix, _minmax_mixin
 from .sputils import upcast, upcast_char, to_native, isshape, getdtype, isintlike, \
-        get_index_dtype, downcast_intp_index
+        get_index_dtype, downcast_intp_index, _compat_bincount


 class coo_matrix(_data_matrix, _minmax_mixin):
@@ -224,11 +224,11 @@ def getnnz(self, axis=None):
             if axis < 0:
                 axis += 2
             if axis == 0:
-                return np.bincount(downcast_intp_index(self.col),
-                                   minlength=self.shape[1])
+                return _compat_bincount(downcast_intp_index(self.col),
+                                        minlength=self.shape[1])
             elif axis == 1:
-                return np.bincount(downcast_intp_index(self.row),
-                                   minlength=self.shape[0])
+                return _compat_bincount(downcast_intp_index(self.row),
+                                        minlength=self.shape[0])
             else:
                 raise ValueError('axis out of bounds')
     nnz = property(fget=getnnz)
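The coo.py change is the same substitution applied per axis: counting the col array gives the number of stored entries in each column, counting the row array gives the count for each row. A standalone sketch with plain numpy arrays standing in for a hypothetical 3x4 COO matrix (the stored values themselves do not matter to getnnz):

    import numpy as np

    row = np.array([0, 0, 1, 2, 2, 2])
    col = np.array([1, 3, 0, 0, 1, 3])
    shape = (3, 4)

    print(np.bincount(col, minlength=shape[1]))   # [2 2 0 2]  -- nonzeros per column (axis=0)
    print(np.bincount(row, minlength=shape[0]))   # [2 1 3]    -- nonzeros per row (axis=1)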
105 changes: 64 additions & 41 deletions scipy/sparse/sputils.py
@@ -334,50 +334,73 @@ def _index_to_arrays(self, i, j):
         return i, j


-if NumpyVersion(np.__version__) > '1.7.0-dev':
-    _safe_unique = np.unique
-else:
-    def _safe_unique(ar, return_index=False, return_inverse=False):
-        """
-        Copy of numpy.unique() from Numpy 1.7.1.
-
-        Earlier versions have bugs in how return_index behaves.
-        """
-        try:
-            ar = ar.flatten()
-        except AttributeError:
-            if not return_inverse and not return_index:
-                items = sorted(set(ar))
-                return np.asarray(items)
-            else:
-                ar = np.asanyarray(ar).flatten()
-
-        if ar.size == 0:
-            if return_inverse and return_index:
-                return ar, np.empty(0, np.bool), np.empty(0, np.bool)
-            elif return_inverse or return_index:
-                return ar, np.empty(0, np.bool)
-            else:
-                return ar
-
-        if return_inverse or return_index:
-            if return_index:
-                perm = ar.argsort(kind='mergesort')
-            else:
-                perm = ar.argsort()
-            aux = ar[perm]
-            flag = np.concatenate(([True], aux[1:] != aux[:-1]))
-            if return_inverse:
-                iflag = np.cumsum(flag) - 1
-                iperm = perm.argsort()
-                if return_index:
-                    return aux[flag], perm[flag], iflag[iperm]
-                else:
-                    return aux[flag], iflag[iperm]
-            else:
-                return aux[flag], perm[flag]
-
-        else:
-            ar.sort()
-            flag = np.concatenate(([True], ar[1:] != ar[:-1]))
-            return ar[flag]
+def _compat_unique_impl(ar, return_index=False, return_inverse=False):
+    """
+    Copy of numpy.unique() from Numpy 1.7.1.
+
+    Earlier versions have bugs in how return_index behaves.
+    """
+    try:
+        ar = ar.flatten()
+    except AttributeError:
+        if not return_inverse and not return_index:
+            items = sorted(set(ar))
+            return np.asarray(items)
+        else:
+            ar = np.asanyarray(ar).flatten()
+
+    if ar.size == 0:
+        if return_inverse and return_index:
+            return ar, np.empty(0, np.bool), np.empty(0, np.bool)
+        elif return_inverse or return_index:
+            return ar, np.empty(0, np.bool)
+        else:
+            return ar
+
+    if return_inverse or return_index:
+        if return_index:
+            perm = ar.argsort(kind='mergesort')
+        else:
+            perm = ar.argsort()
+        aux = ar[perm]
+        flag = np.concatenate(([True], aux[1:] != aux[:-1]))
+        if return_inverse:
+            iflag = np.cumsum(flag) - 1
+            iperm = perm.argsort()
+            if return_index:
+                return aux[flag], perm[flag], iflag[iperm]
+            else:
+                return aux[flag], iflag[iperm]
+        else:
+            return aux[flag], perm[flag]
+
+    else:
+        ar.sort()
+        flag = np.concatenate(([True], ar[1:] != ar[:-1]))
+        return ar[flag]
+
+
+if NumpyVersion(np.__version__) > '1.7.0-dev':
+    _compat_unique = np.unique
+else:
+    _compat_unique = _compat_unique_impl
+
+
+def _compat_bincount_impl(x, weights=None, minlength=None):
+    """
+    Bincount with minlength keyword added for Numpy 1.5.
+    """
+    if weights is None:
+        x = np.bincount(x)
+    else:
+        x = np.bincount(x, weights=weights)
+    if minlength is not None:
+        if x.shape[0] < minlength:
+            x = np.r_[x, np.zeros((minlength - x.shape[0],))]
+    return x
+
+
+if NumpyVersion(np.__version__) > '1.6.0-dev':
+    _compat_bincount = np.bincount
+else:
+    _compat_bincount = _compat_bincount_impl
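The copied unique() uses a stable mergesort whenever return_index is requested, so the returned indices point at the first occurrence of each distinct value; that is the property the _insert_many chunking in compressed.py relies on, and presumably what the "bugs in how return_index behaves" note about older numpy refers to. A quick behaviour check mirroring the new test further down, written against np.unique since on current numpy it matches the copied implementation:

    import numpy as np

    # Ten distinct values, each repeated seven times, in order.
    x = np.repeat(np.arange(10, dtype=np.int32), 7)

    values, first_index = np.unique(x, return_index=True)
    print(values)       # [0 1 2 3 4 5 6 7 8 9]
    print(first_index)  # [ 0  7 14 21 28 35 42 49 56 63]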
4 changes: 2 additions & 2 deletions scipy/sparse/tests/test_base.py
@@ -3743,8 +3743,8 @@ def _arg1_for_noncanonical(self, M):
                                         indptr=M.indptr)
         # unsorted
         for start, stop in izip(indptr, indptr[1:]):
-            indices[start:stop] = indices[start:stop][::-1]
-            data[start:stop] = data[start:stop][::-1]
+            indices[start:stop] = indices[start:stop][::-1].copy()
+            data[start:stop] = data[start:stop][::-1].copy()
         return data, indices, indptr


21 changes: 20 additions & 1 deletion scipy/sparse/tests/test_sputils.py
@@ -3,7 +3,8 @@
 from __future__ import division, print_function, absolute_import

 import numpy as np
-from numpy.testing import TestCase, run_module_suite, assert_equal
+from numpy.testing import TestCase, run_module_suite, assert_equal, dec, \
+    assert_array_equal
 from scipy.sparse import sputils


@@ -67,5 +68,23 @@ def test_isdense(self):
         assert_equal(sputils.isdense(np.array([1])),True)
         assert_equal(sputils.isdense(np.matrix([1])),True)

+    def test_compat_unique(self):
+        x = np.array([0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
+                      2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3,
+                      4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5,
+                      6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7,
+                      8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9],
+                     dtype=np.int32)
+        y, j1 = sputils._compat_unique_impl(x, return_index=True)
+        j2 = np.array([0, 7, 14, 21, 28, 35, 42, 49, 56, 63])
+        assert_array_equal(j1, j2)
+
+    def test_compat_bincount(self):
+        x = np.arange(4)
+        y1 = sputils._compat_bincount_impl(x, minlength=10)
+        y2 = np.array([1, 1, 1, 1, 0, 0, 0, 0, 0, 0])
+        assert_array_equal(y1, y2)
+
+
 if __name__ == "__main__":
     run_module_suite()
