Reverse Cuthill-McKee and Maximum Bipartite Matching reorderings of sparse graphs #3751

Merged: 19 commits (Aug 31, 2014)
1 change: 1 addition & 0 deletions .gitignore
@@ -157,6 +157,7 @@ scipy/sparse/csgraph/_min_spanning_tree.c
scipy/sparse/csgraph/_shortest_path.c
scipy/sparse/csgraph/_tools.c
scipy/sparse/csgraph/_traversal.c
scipy/sparse/csgraph/_reordering.c
scipy/sparse/linalg/dsolve/umfpack/_umfpack.py
scipy/sparse/linalg/dsolve/umfpack/_umfpack_wrap.c
scipy/sparse/linalg/eigen/arpack/_arpack-f2pywrappers.f
3 changes: 3 additions & 0 deletions scipy/sparse/csgraph/__init__.py
@@ -135,6 +135,8 @@
'breadth_first_tree',
'depth_first_tree',
'minimum_spanning_tree',
'reverse_cuthill_mckee',
'maximum_bipartite_matching',
'construct_dist_matrix',
'reconstruct_path',
'csgraph_from_dense',
@@ -150,6 +152,7 @@
from ._traversal import breadth_first_order, depth_first_order, \
breadth_first_tree, depth_first_tree, connected_components
from ._min_spanning_tree import minimum_spanning_tree
from ._reordering import reverse_cuthill_mckee, maximum_bipartite_matching
from ._tools import construct_dist_matrix, reconstruct_path,\
csgraph_from_dense, csgraph_to_dense, csgraph_masked_from_dense,\
csgraph_from_masked
240 changes: 240 additions & 0 deletions scipy/sparse/csgraph/_reordering.pyx
@@ -0,0 +1,240 @@
# Author: Paul Nation -- <pnation@korea.ac.kr>
# Original Source: QuTiP: Quantum Toolbox in Python (qutip.org)
# License: New BSD, (C) 2014

import numpy as np
cimport numpy as np

from scipy.sparse import isspmatrix_csr, isspmatrix_csc, isspmatrix_coo

include 'parameters.pxi'

def reverse_cuthill_mckee(graph, symmetric_mode=False):
"""
[Review comment, Member] Docstring format: need a one-line summary, then a blank line. The first line gets picked up in listing in html docs.

Returns the permutation array that orders a sparse CSR or CSC matrix
in Reverse Cuthill-McKee ordering.

By default (``symmetric_mode=False``) the input matrix is assumed to be
non-symmetric, and the routine works on the symmetrized matrix ``A + A.T``.
If the matrix is guaranteed to be symmetric in structure (the values of the
matrix elements do not matter), set ``symmetric_mode=True``.

Parameters
----------
graph : sparse matrix
Input sparse matrix in CSC or CSR format.
symmetric_mode : bool, optional
Whether the input matrix is guaranteed to be symmetric in structure.

Returns
-------
perm : ndarray
Array of permuted row and column indices.

References
----------
E. Cuthill and J. McKee, "Reducing the Bandwidth of Sparse Symmetric Matrices",
Proceedings of the 24th ACM National Conference (ACM '69), 1969.

"""
if not (isspmatrix_csc(graph) or isspmatrix_csr(graph)):
raise TypeError('Input must be in CSC or CSR sparse matrix format.')
nrows = graph.shape[0]
if not symmetric_mode:
graph = graph+graph.transpose()
return _reverse_cuthill_mckee(graph.indices, graph.indptr, nrows)
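A usage sketch, not part of this diff: the permutation returned above is applied to both the rows and the columns of the matrix. The small symmetric matrix is only illustrative.

```python
import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import reverse_cuthill_mckee

# Small matrix with a symmetric, scattered sparsity pattern.
A = csr_matrix(np.array([[1, 0, 0, 0, 1],
                         [0, 1, 1, 0, 0],
                         [0, 1, 1, 0, 1],
                         [0, 0, 0, 1, 0],
                         [1, 0, 1, 0, 1]], dtype=float))

perm = reverse_cuthill_mckee(A, symmetric_mode=True)

# Apply the same permutation to rows and columns to reorder the matrix.
B = A[perm, :][:, perm]
print(perm)
print(B.toarray())
```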


def maximum_bipartite_matching(graph, perm_type='row'):
"""
Returns an array of row or column permutations that makes the diagonal
of a nonsingular square CSC sparse matrix zero-free.

Such a permutation is always possible provided that the matrix
is nonsingular. This function looks at the structure of the matrix
only. The input matrix will be converted to CSC matrix format if
necessary.

Parameters
----------
graph : sparse matrix
Input sparse matrix in CSC format.
perm_type : str, {'row', 'column'}
Type of permutation to generate.

Returns
-------
perm : ndarray
Array of row or column permutations.

Notes
-----
This function relies on a maximum cardinality bipartite matching
algorithm based on a breadth-first search (BFS) of the underlying
graph.

References
----------
I. S. Duff, K. Kaya, and B. Ucar, "Design, Implementation, and
Analysis of Maximum Transversal Algorithms", ACM Trans. Math. Softw.
38, no. 2, (2011).

"""
nrows = graph.shape[0]
if graph.shape[0] != graph.shape[1]:
raise ValueError('Maximum bipartite matching requires a square matrix.')
if isspmatrix_csr(graph) or isspmatrix_coo(graph):
graph = graph.tocsc()
elif not isspmatrix_csc(graph):
raise TypeError("graph must be in CSC, CSR, or COO format.")
if perm_type == 'column':
graph = graph.transpose().tocsc()
perm = _maximum_bipartite_matching(graph.indices, graph.indptr, nrows)
if np.any(perm==-1):
raise Exception('Possibly singular input matrix.')
return perm
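A usage sketch based on my reading of the code above, not part of this diff: with ``perm_type='row'`` the returned array appears to satisfy ``A[perm[i], i] != 0``, so permuting the rows as ``A[perm]`` should yield a zero-free diagonal for a structurally nonsingular input.

```python
import numpy as np
from scipy.sparse import csc_matrix
from scipy.sparse.csgraph import maximum_bipartite_matching

# Structurally nonsingular matrix with zeros on the diagonal.
A = csc_matrix(np.array([[0, 1, 0],
                         [1, 0, 0],
                         [0, 0, 1]], dtype=float))

perm = maximum_bipartite_matching(A, perm_type='row')

# Permute the rows only; the diagonal of the result should be zero-free.
B = A.tocsr()[perm, :]
assert np.all(B.diagonal() != 0)
```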



def _node_degrees(
np.ndarray[int32_or_int64, ndim=1, mode="c"] ind,
np.ndarray[int32_or_int64, ndim=1, mode="c"] ptr,
int num_rows):
"""
Find the degree of each node (matrix row) in a graph represented
by a sparse CSR or CSC matrix.
"""
cdef unsigned int ii, jj
cdef np.ndarray[int32_or_int64] degree = np.zeros(num_rows, dtype=ind.dtype)

for ii in range(num_rows):
degree[ii] = ptr[ii + 1] - ptr[ii]
for jj in range(ptr[ii], ptr[ii + 1]):
if ind[jj] == ii:
# add one if the diagonal is in row ii
degree[ii] += 1
break
return degree
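For reference, an equivalent plain-NumPy sketch of the degree computation above (illustration only, not a replacement for the typed Cython loop):

```python
import numpy as np

def node_degrees_py(ind, ptr, num_rows):
    # The per-row count of stored entries comes straight from the index pointer.
    degree = np.diff(ptr[:num_rows + 1]).astype(ind.dtype)
    # Add one for rows with an explicitly stored diagonal entry,
    # matching the loop above.
    for ii in range(num_rows):
        if ii in ind[ptr[ii]:ptr[ii + 1]]:
            degree[ii] += 1
    return degree
```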


def _reverse_cuthill_mckee(np.ndarray[int32_or_int64, ndim=1, mode="c"] ind,
np.ndarray[int32_or_int64, ndim=1, mode="c"] ptr,
int num_rows):
"""
Reverse Cuthill-McKee ordering of a sparse symmetric CSR or CSC matrix.
We follow the original Cuthill-McKee paper and always start the routine
at a node of lowest degree for each connected component.
"""
cdef unsigned int N = 0, N_old, level_start, level_end, temp
cdef unsigned int zz, ii, jj, kk, ll, level_len
cdef np.ndarray[int32_or_int64] order = np.zeros(num_rows, dtype=ind.dtype)
cdef np.ndarray[ITYPE_t] degree = _node_degrees(ind, ptr, num_rows).astype(ITYPE)
cdef np.ndarray[ITYPE_t] inds = np.argsort(degree).astype(ITYPE)
cdef np.ndarray[ITYPE_t] rev_inds = np.argsort(inds).astype(ITYPE)
cdef np.ndarray[ITYPE_t] temp_degrees = np.zeros(np.max(degree), dtype=ITYPE)
cdef int32_or_int64 i, j, seed, temp2

# The loop over zz handles graphs that may be disconnected.
for zz in range(num_rows):
if inds[zz] != -1: # Do BFS with seed=inds[zz]
seed = inds[zz]
order[N] = seed
N += 1
inds[rev_inds[seed]] = -1
level_start = N - 1
level_end = N

while level_start < level_end:
for ii in range(level_start, level_end):
i = order[ii]
N_old = N

# add unvisited neighbors
for jj in range(ptr[i], ptr[i + 1]):
# j is node number connected to i
j = ind[jj]
if inds[rev_inds[j]] != -1:
inds[rev_inds[j]] = -1
order[N] = j
N += 1

# Add values to temp_degrees array for insertion sort
level_len = 0
for kk in range(N_old, N):
temp_degrees[level_len] = degree[order[kk]]
level_len += 1

# Do insertion sort for nodes from lowest to highest degree
for kk in range(1,level_len):
temp = temp_degrees[kk]
temp2 = order[N_old+kk]
ll = kk
while (ll > 0) and (temp < temp_degrees[ll-1]):
temp_degrees[ll] = temp_degrees[ll-1]
order[N_old+ll] = order[N_old+ll-1]
ll -= 1
temp_degrees[ll] = temp
order[N_old+ll] = temp2

# set next level start and end ranges
level_start = level_end
level_end = N

if N == num_rows:
break

# return reversed order for RCM ordering
return order[::-1]
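An illustrative check of the effect of the ordering; the ``bandwidth`` helper below is my own, and ``scipy.sparse.random`` is used only as a convenient way to build a test matrix.

```python
import numpy as np
import scipy.sparse as sp
from scipy.sparse.csgraph import reverse_cuthill_mckee

def bandwidth(A):
    # Largest distance of any structural nonzero from the main diagonal.
    rows, cols = A.nonzero()
    return int(np.max(np.abs(rows - cols)))

A = sp.random(200, 200, density=0.02, format='csr', random_state=0)
A = (A + A.T).tocsr()                     # symmetrize the sparsity pattern

perm = reverse_cuthill_mckee(A, symmetric_mode=True)
B = A[perm, :][:, perm]
print(bandwidth(A), '->', bandwidth(B))   # RCM typically shrinks the bandwidth
```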


def _maximum_bipartite_matching(
np.ndarray[int32_or_int64, ndim=1, mode="c"] inds,
np.ndarray[int32_or_int64, ndim=1, mode="c"] ptrs,
int n):
"""
Maximum bipartite matching of a graph in CSC format.
"""
cdef np.ndarray[int32_or_int64] visited = np.zeros(n, dtype=inds.dtype)
cdef np.ndarray[ITYPE_t] queue = np.zeros(n, dtype=ITYPE)
cdef np.ndarray[ITYPE_t] previous = np.zeros(n, dtype=ITYPE)
cdef np.ndarray[int32_or_int64] match = (-1 * np.ones(n)).astype(inds.dtype)
cdef np.ndarray[ITYPE_t] row_match = (-1 * np.ones(n)).astype(ITYPE)
cdef int queue_ptr, queue_col, ptr, i, j, queue_size
cdef int col, next_num = 1
cdef int32_or_int64 row, temp, eptr

for i in range(n):
if match[i] == -1 and (ptrs[i] != ptrs[i + 1]):
queue[0] = i
queue_ptr = 0
queue_size = 1
while (queue_size > queue_ptr):
queue_col = queue[queue_ptr]
queue_ptr += 1
eptr = ptrs[queue_col + 1]
for ptr in range(ptrs[queue_col], eptr):
row = inds[ptr]
temp = visited[row]
if (temp != next_num and temp != -1):
previous[row] = queue_col
visited[row] = next_num
col = row_match[row]
if (col == -1):
while (row != -1):
col = previous[row]
temp = match[col]
match[col] = row
row_match[row] = col
row = temp
next_num += 1
queue_size = 0
break
else:
queue[queue_size] = col
queue_size += 1

if match[i] == -1:
for j in range(1, queue_size):
visited[match[queue[j]]] = -1

return match
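A small sanity check of the invariant the routine above maintains, going by my reading of the code: for a CSC input, ``match[col]`` is a row with a structural nonzero in column ``col``, and matched rows are distinct.

```python
import numpy as np
from scipy.sparse import csc_matrix
from scipy.sparse.csgraph import maximum_bipartite_matching

A = csc_matrix(np.array([[0, 1, 1],
                         [1, 0, 0],
                         [1, 0, 1]], dtype=float))

perm = maximum_bipartite_matching(A)  # default perm_type='row'

# Each column is matched to a distinct row holding a structural nonzero.
assert len(set(perm)) == len(perm)
assert all(A[perm[col], col] != 0 for col in range(A.shape[0]))
```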
2 changes: 2 additions & 0 deletions scipy/sparse/csgraph/bento.info
@@ -5,5 +5,7 @@ Library:
Sources: _traversal.c
Extension: _min_spanning_tree
Sources: _min_spanning_tree.c
Extension: _reordering
Sources: _reordering.c
Extension: _tools
Sources: _tools.c
6 changes: 6 additions & 0 deletions scipy/sparse/csgraph/parameters.pxi
@@ -1,9 +1,15 @@

DTYPE = np.float64
ctypedef np.float64_t DTYPE_t

ITYPE = np.int32
ctypedef np.int32_t ITYPE_t

# Fused type for int32 and int64
ctypedef fused int32_or_int64:
np.int32_t
np.int64_t

# EPS is the precision of DTYPE
cdef DTYPE_t DTYPE_EPS = 1E-15

4 changes: 4 additions & 0 deletions scipy/sparse/csgraph/setup.py
@@ -24,6 +24,10 @@ def configuration(parent_package='', top_path=None):
config.add_extension('_min_spanning_tree',
sources=['_min_spanning_tree.c'],
include_dirs=[numpy.get_include()])

config.add_extension('_reordering',
sources=['_reordering.c'],
include_dirs=[numpy.get_include()])

config.add_extension('_tools',
sources=['_tools.c'],