Skip to content

Commit

Permalink
Accelerate AffinityPropagation.
Browse files Browse the repository at this point in the history
Accelerate AffinityPropagation for large inputs using standard tricks
(in-place operations and out parameters) to avoid extra allocations.

Basic tests on datasets of 1000 points in 2D show a ~10-20% increase in
speed.
  • Loading branch information
anntzer committed Nov 21, 2014
1 parent 2504cd6 commit a030734
Showing 1 changed file with 21 additions and 13 deletions.
34 changes: 21 additions & 13 deletions sklearn/cluster/affinity_propagation_.py
Expand Up @@ -95,6 +95,11 @@ def affinity_propagation(S, preference=None, convergence_iter=15, max_iter=200,

A = np.zeros((n_samples, n_samples))
R = np.zeros((n_samples, n_samples)) # Initialize messages
# Intermediate results
AS = np.zeros((n_samples, n_samples))
Rp = np.zeros((n_samples, n_samples))
Anew = np.zeros((n_samples, n_samples))
Rnew = np.zeros((n_samples, n_samples))

# Remove degeneracies
S += ((np.finfo(np.double).eps * S + np.finfo(np.double).tiny * 100) *
Expand All @@ -107,34 +112,37 @@ def affinity_propagation(S, preference=None, convergence_iter=15, max_iter=200,

for it in range(max_iter):
# Compute responsibilities
Rold = R.copy()
AS = A + S
np.add(A, S, AS)

I = np.argmax(AS, axis=1)
Y = AS[np.arange(n_samples), I] # np.max(AS, axis=1)

AS[ind, I[ind]] = - np.finfo(np.double).max

Y2 = np.max(AS, axis=1)
R = S - Y[:, np.newaxis]
np.subtract(S, Y[:, None], Rnew)
Rnew[ind, I[ind]] = S[ind, I[ind]] - Y2[ind]

R[ind, I[ind]] = S[ind, I[ind]] - Y2[ind]

R = (1 - damping) * R + damping * Rold # Damping
# Damping
Rnew *= 1 - damping
R *= damping
R += Rnew

# Compute availabilities
Aold = A
Rp = np.maximum(R, 0)
np.maximum(R, 0, Rp)
Rp.flat[::n_samples + 1] = R.flat[::n_samples + 1]

A = np.sum(Rp, axis=0)[np.newaxis, :] - Rp
np.subtract(np.sum(Rp, axis=0), Rp, Anew)

dA = np.diag(A)
A = np.minimum(A, 0)
dA = np.diag(Anew)
Anew = np.minimum(Anew, 0)

A.flat[::n_samples + 1] = dA
Anew.flat[::n_samples + 1] = dA

A = (1 - damping) * A + damping * Aold # Damping
# Damping
Anew *= 1 - damping
A *= damping
A += Anew

# Check for convergence
E = (np.diag(A) + np.diag(R)) > 0
Expand Down

0 comments on commit a030734

Please sign in to comment.