[MRG+1] NMF speed-up for beta_loss = 0 (scikit-learn#9277)
hongkahjun authored and massich committed Jul 13, 2017
1 parent ce6b00a commit 4c777b0
Showing 1 changed file with 15 additions and 0 deletions.
sklearn/decomposition/nmf.py
@@ -545,6 +545,13 @@ def _multiplicative_update_w(X, W, H, beta_loss, l1_reg_W, l2_reg_W, gamma,

        if beta_loss == 1:
            np.divide(X_data, WH_safe_X_data, out=WH_safe_X_data)
        elif beta_loss == 0:
            # speeds up computation
            # see https://github.com/numpy/numpy/issues/9363
            WH_safe_X_data **= -1
            WH_safe_X_data **= 2
            # element-wise multiplication
            WH_safe_X_data *= X_data
        else:
            WH_safe_X_data **= beta_loss - 2
            # element-wise multiplication
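The speed-up works because NumPy's elementwise power has fast paths for small special exponents such as -1 (reciprocal) and 2 (square), while a general exponent like -2 falls through to the slower generic pow loop (see the linked numpy issue). A minimal timing sketch, not from the commit, with an illustrative array size:

    import timeit
    import numpy as np

    rng = np.random.RandomState(0)
    a = rng.rand(1000000) + 0.1  # strictly positive, as in the NMF update

    # baseline: one generic negative power
    t_slow = timeit.timeit(lambda: a ** -2, number=20)

    # the commit's variant: two fast-path powers, -1 then 2
    def inverse_square(x):
        y = x ** -1
        y **= 2
        return y

    t_fast = timeit.timeit(lambda: inverse_square(a), number=20)
    print('a ** -2        :', t_slow)
    print('(a ** -1) ** 2 :', t_fast)

    # both variants compute the same values
    assert np.allclose(a ** -2, inverse_square(a))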
@@ -619,6 +626,13 @@ def _multiplicative_update_h(X, W, H, beta_loss, l1_reg_H, l2_reg_H, gamma):

        if beta_loss == 1:
            np.divide(X_data, WH_safe_X_data, out=WH_safe_X_data)
        elif beta_loss == 0:
            # speeds up computation
            # see https://github.com/numpy/numpy/issues/9363
            WH_safe_X_data **= -1
            WH_safe_X_data **= 2
            # element-wise multiplication
            WH_safe_X_data *= X_data
        else:
            WH_safe_X_data **= beta_loss - 2
            # element-wise multiplication
@@ -1167,6 +1181,7 @@ class NMF(BaseEstimator, TransformerMixin):
    Fevotte, C., & Idier, J. (2011). Algorithms for nonnegative matrix
    factorization with the beta-divergence. Neural Computation, 23(9).
    """

    def __init__(self, n_components=None, init=None, solver='cd',
                 beta_loss='frobenius', tol=1e-4, max_iter=200,
                 random_state=None, alpha=0., l1_ratio=0., verbose=0,
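For context, the new branch is exercised whenever the multiplicative-update solver runs with beta_loss = 0, i.e. the Itakura-Saito divergence. A minimal usage sketch against the public NMF API (data and parameter values are illustrative; X must be strictly positive when beta_loss <= 0):

    import numpy as np
    from sklearn.decomposition import NMF

    rng = np.random.RandomState(0)
    X = rng.rand(200, 50) + 0.01  # strictly positive, required for beta_loss <= 0

    # 'mu' is the multiplicative-update solver; beta_loss='itakura-saito'
    # (equivalently beta_loss=0) takes the sped-up **= -1, **= 2 path above
    model = NMF(n_components=10, solver='mu', beta_loss='itakura-saito',
                max_iter=200, random_state=0)
    W = model.fit_transform(X)
    H = model.components_
    print(W.shape, H.shape)  # (200, 10) (10, 50)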
