Commit f08c327
Changing array3d function to convert list of samples into 3d numpy array. Correcting the test coverage to reflect this change.
Sylvain Chevallier committed Jan 5, 2016
1 parent f57117c commit f08c327
Showing 2 changed files with 29 additions and 15 deletions.
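For context on the change: estimators in this repository now accept the training set either as a 3-D ndarray of shape (n_samples, n_features, n_dims) or as a plain Python list of per-sample 2-D arrays, which array3d (modified below) converts to the 3-D form. A minimal sketch of the two equivalent inputs, reusing the shapes from test_mdla.py (the variable names here are illustrative only):

    import numpy as np

    rng = np.random.RandomState(0)
    n_samples, n_features, n_dims = 10, 5, 3

    # One 3-D block: (n_samples, n_features, n_dims)
    X_array = rng.randn(n_samples, n_features, n_dims)

    # Or a list of n_samples arrays, each of shape (n_features, n_dims)
    X_list = [rng.randn(n_features, n_dims) for _ in range(n_samples)]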
3 changes: 2 additions & 1 deletion mdla.py
@@ -354,6 +354,7 @@ def multivariate_sparse_encode(X, dictionary, n_nonzero_coefs=None,
if verbose >= 2:
tstart = time()

+ X = array3d(X)
n_samples, n_features, n_dims = X.shape
if isinstance(dictionary, MultivariateDictLearning) or \
isinstance(dictionary, MiniBatchMultivariateDictLearning):
@@ -1499,7 +1500,7 @@ def array3d(X, dtype=None, order=None, copy=False, force_all_finite=True):
raise TypeError('A sparse matrix was passed, but dense data '
'is required. Use X.toarray() to convert to dense.')
X_3d = np.asarray(np.atleast_3d(X), dtype=dtype, order=order)
- if X_3d.ndim == 2:
+ if type(X) is np.ndarray and X.ndim == 2:
X_3d = X_3d.swapaxes(0,2)
X_3d = X_3d.swapaxes(1,2)
if force_all_finite:
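To make the guard above concrete, here is a small shape sketch (my reading of the diff, not code from the commit): np.atleast_3d appends a trailing axis to a single 2-D sample, and the two swapaxes calls turn that result into a batch of one sample, while a list of samples already comes out in batch form:

    import numpy as np

    x = np.zeros((5, 3))                   # one sample: (n_features, n_dims)
    x3 = np.atleast_3d(x)                  # shape (5, 3, 1): trailing axis appended
    x3 = x3.swapaxes(0, 2).swapaxes(1, 2)  # shape (1, 5, 3): a one-sample batch

    X_list = [np.zeros((5, 3)) for _ in range(10)]
    print(np.atleast_3d(X_list).shape)     # (10, 5, 3): already (n_samples, n_features, n_dims)

The original check on X_3d.ndim could never fire, since np.atleast_3d always returns at least three dimensions; testing the type and ndim of the original X restores the intended single-sample handling.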
41 changes: 27 additions & 14 deletions test_mdla.py
@@ -17,7 +17,7 @@

rng_global = np.random.RandomState(0)
n_samples, n_features, n_dims = 10, 5, 3
- X = rng_global.randn(n_samples, n_features, n_dims)
+ X = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]

def test_mdla_shapes():
n_kernels = 8
@@ -37,7 +37,7 @@ def test_multivariate_input_shape():
assert_raises(ValueError, dico.fit, X)

n_dims = 6
- Xw = rng_global.randn(n_samples, n_features, n_dims)
+ Xw = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]
dico = MultivariateDictLearning(n_kernels=n_kernels)
assert_raises(ValueError, dico.fit, Xw)

@@ -46,7 +46,7 @@ def test_multivariate_input_shape():
assert_raises(ValueError, dico.fit, X)

n_dims = 6
- Xw = rng_global.randn(n_samples, n_features, n_dims)
+ Xw = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]
dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels)
assert_raises(ValueError, dico.fit, Xw)

@@ -55,7 +55,7 @@ def test_multivariate_input_shape():
assert_raises(ValueError, dico.partial_fit, X)

n_dims = 6
- Xw = rng_global.randn(n_samples, n_features, n_dims)
+ Xw = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]
dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels)
assert_raises(ValueError, dico.partial_fit, Xw)

@@ -79,32 +79,45 @@ def my_callback(loc):
dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
max_iter=2, n_nonzero_coefs=1,
callback=my_callback)
- code = dico.fit(X).transform(X[0,:,:])
+ code = dico.fit(X).transform(X[0])
assert_true(len(code[0]) <= 1)
dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
random_state=0, n_iter=2, n_nonzero_coefs=1,
callback=my_callback)
- code = dico.fit(X).transform(X[0,:,:])
+ code = dico.fit(X).transform(X[0])
assert_true(len(code[0]) <= 1)

def test_mdla_nonzero_coefs():
n_kernels = 8
dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
max_iter=3, n_nonzero_coefs=3, verbose=5)
- code = dico.fit(X).transform(X[0,:,:])
+ code = dico.fit(X).transform(X[0])
assert_true(len(code[0]) <= 3)

dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
random_state=0, n_iter=3, n_nonzero_coefs=3, verbose=5)
- code = dico.fit(X).transform(X[0,:,:])
+ code = dico.fit(X).transform(X[0])
assert_true(len(code[0]) <= 3)

+ def test_X_array():
+ n_kernels = 8
+ X = rng_global.randn(n_samples, n_features, n_dims)
+ dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
+ max_iter=3, n_nonzero_coefs=3, verbose=5)
+ code = dico.fit(X).transform(X[0])
+ assert_true(len(code[0]) <= 3)
+
+ dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
+ random_state=0, n_iter=3, n_nonzero_coefs=3, verbose=5)
+ code = dico.fit(X).transform(X[0])
+ assert_true(len(code[0]) <= 3)

def test_mdla_shuffle():
n_kernels = 8
dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
random_state=0, n_iter=3, n_nonzero_coefs=1,
verbose=5, shuffle=False)
- code = dico.fit(X).transform(X[0,:,:])
+ code = dico.fit(X).transform(X[0])
assert_true(len(code[0]) <= 1)

def test_n_kernels():
Expand Down Expand Up @@ -141,7 +154,7 @@ def test_sparse_encode():

def test_dict_init():
n_kernels = 8
- d = [np.random.randn(n_features, n_dims) for i in range(n_kernels)]
+ d = [rng_global.randn(n_features, n_dims) for i in range(n_kernels)]
for i in range(len(d)):
d[i] /= np.linalg.norm(d[i], 'fro')
dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
@@ -150,7 +163,7 @@ def test_dict_init():
dico = dico.fit(X)
for i in range(n_kernels):
assert_array_almost_equal(dico.kernels_[i], d[i])
- # code = dico.fit(X).transform(X[0,:,:])
+ # code = dico.fit(X).transform(X[0])
# assert_true(len(code[0]) > 1)

dico = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
@@ -159,13 +172,13 @@ def test_dict_init():
dico = dico.fit(X)
for i in range(n_kernels):
assert_array_almost_equal(dico.kernels_[i], d[i])
- # code = dico.fit(X).transform(X[0,:,:])
+ # code = dico.fit(X).transform(X[0])
# assert_true(len(code[0]) <= 1)

def test_mdla_dict_init():
n_kernels = 10
n_samples, n_features, n_dims = 20, 5, 3
- X = rng_global.randn(n_samples, n_features, n_dims)
+ X = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]
dict_init = [np.random.randn(n_features, n_dims) for i in range(n_kernels)]
dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
max_iter=10, dict_init=dict_init).fit(X)
@@ -178,7 +191,7 @@ def test_mdla_dict_update():
n_kernels = 10
# n_samples, n_features, n_dims = 100, 5, 3
n_samples, n_features, n_dims = 80, 5, 3
- X = rng_global.randn(n_samples, n_features, n_dims)
+ X = [rng_global.randn(n_features, n_dims) for i in range(n_samples)]
dico = MultivariateDictLearning(n_kernels=n_kernels, random_state=0,
max_iter=10, n_jobs=-1).fit(X)
first_epoch = list(dico.kernels_)
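Putting the two files together, usage after this commit should look roughly like the following (a sketch assembled from the tests above, not an official example; it assumes MultivariateDictLearning is importable from mdla, and the parameter values are the ones used in the tests):

    import numpy as np
    from mdla import MultivariateDictLearning

    rng = np.random.RandomState(0)
    X = [rng.randn(5, 3) for _ in range(10)]   # 10 samples of shape (n_features=5, n_dims=3)

    dico = MultivariateDictLearning(n_kernels=8, random_state=0,
                                    max_iter=2, n_nonzero_coefs=1)
    code = dico.fit(X).transform(X[0])         # a single 2-D sample is accepted by transform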
