Skip to content

Commit

Permalink
before sync
Browse files Browse the repository at this point in the history
  • Loading branch information
Sylvain Chevallier committed Sep 29, 2015
1 parent 8c209f6 commit 0d8c219
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 36 deletions.
13 changes: 7 additions & 6 deletions example_sparse_decomposition.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,37 +113,38 @@ def decomposition_random_dictionary(Gaussian=True, rng=None, n_features=65,

fig = plt.figure(figsize=(15,5))
uni = fig.add_subplot(1,3,1)
uni.set_title(r'Random univariate (n=1) dictionary')
uni.set_title(r'$n$=1')
uni.errorbar(range(1, n_nonzero_coefs+1), rmse_uniform1.mean(1),
yerr=rmse_uniform1.std(1), label='Uniform')
uni.errorbar(range(1, n_nonzero_coefs+1), rmse_gaussian1.mean(1),
yerr=rmse_gaussian1.std(1), color='r', label='Gaussian')
uni.plot(range(n_nonzero_coefs+2), np.zeros(n_nonzero_coefs+2), 'k')
uni.axis([0, n_nonzero_coefs+1, 0, 90])
uni.axis([0, n_nonzero_coefs+1, 0, 100])
uni.set_xticks(range(0, n_nonzero_coefs+2, 5))
uni.set_ylabel('rRMSE (%)')
uni.legend(loc='upper right')
mul1 = fig.add_subplot(1,3,2)
mul1.set_title(r'Random multivariate (n=3) dictionary')
mul1.set_title(r'Random multivariate dictionary, $n$=3')
mul1.errorbar(range(1, n_nonzero_coefs+1), rmse_uniform2.mean(1),
yerr=rmse_uniform2.std(1), label='Uniform')
mul1.errorbar(range(1, n_nonzero_coefs+1), rmse_gaussian2.mean(1),
yerr=rmse_gaussian2.std(1), color='r', label='Gaussian')
mul1.plot(range(n_nonzero_coefs+2), np.zeros(n_nonzero_coefs+2), 'k')
mul1.axis([0, n_nonzero_coefs+1, 0, 90])
mul1.axis([0, n_nonzero_coefs+1, 0, 100])
mul1.set_xticks(range(0, n_nonzero_coefs+2, 5))
mul1.set_xlabel('k')
mul1.legend(loc='upper right')
mul2 = fig.add_subplot(1,3,3)
mul2.set_title(r'Random multivariate (n=5) dictionary')
mul2.set_title(r'$n$=5')
mul2.errorbar(range(1, n_nonzero_coefs+1), rmse_uniform3.mean(1),
yerr=rmse_uniform3.std(1), label='Uniform')
mul2.errorbar(range(1, n_nonzero_coefs+1), rmse_gaussian3.mean(1),
yerr=rmse_gaussian3.std(1), color='r', label='Gaussian')
mul2.plot(range(n_nonzero_coefs+2), np.zeros(n_nonzero_coefs+2), 'k')
mul2.axis([0, n_nonzero_coefs+1, 0, 90])
mul2.axis([0, n_nonzero_coefs+1, 0, 100])
mul2.set_xticks(range(0, n_nonzero_coefs+2, 5))
mul2.legend(loc='upper right')
plt.tight_layout(.5)
plt.savefig('sparse_decomposition_multivariate.png')


57 changes: 30 additions & 27 deletions example_univariate.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,46 +5,48 @@
from mdla import multivariate_sparse_encode
from dict_metrics import hausdorff, emd, detectionRate
from numpy.linalg import norm
from numpy import array, arange, zeros
from numpy import array, arange, zeros, min, max
from numpy.random import rand, randn, permutation, randint

# TODO: Add SNR, repeat experiments to make stats, make a fast and a
# long version, use callback to compute distance

def plot_univariate(objective_error, detection_rate, wasserstein, figname):
fig = plt.figure(figsize=(10,6))
def plot_univariate(objective_error, detection_rate, wasserstein,
n_iter, figname):
fig = plt.figure(figsize=(15,5))

# plotting data from objective error
objerr = fig.add_subplot(1,3,1)
oe = objerr.plot(arange(1, len(objective_error)+1), objective_error,
_ = objerr.plot(n_iter*arange(1, len(objective_error)+1), objective_error,
color='green', label=r'Objective error')
# objerr.axis([0, len(objective_error)-1, 0, np.max(objective_error)])
# objerr.set_xticks(arange(0,len(objective_error)+1,10))
objerr.axis([0, len(objective_error)-1, min(objective_error),
max(objective_error)])
objerr.set_xticks(arange(0, n_iter*len(objective_error)+1, n_iter))
objerr.set_xlabel('Iteration')
objerr.set_ylabel(r'Error (no unit)')
objerr.legend(loc='lower right')
objerr.legend(loc='upper right')

# plotting data from detection rate 0.97
# plotting data from detection rate 0.99
detection = fig.add_subplot(1,3,2)
detrat = detection.plot(arange(1,len(detection_rate)+1), detection_rate,
color='magenta', label=r'Detection rate 0.97')
# detection.axis([0, len(detection_rate), 0, 100])
# detection.set_xticks(arange(0, len(detection_rate),10))
# detection.set_xlabel('Iteration')
_ = detection.plot(n_iter*arange(1,len(detection_rate)+1), detection_rate,
color='magenta', label=r'Detection rate 0.99')
detection.axis([0, len(detection_rate), 0, 100])
detection.set_xticks(arange(0, n_iter*len(detection_rate)+1, n_iter))
detection.set_xlabel('Iteration')
detection.set_ylabel(r'Recovery rate (in %)')
detection.legend(loc='lower right')
detection.legend(loc='upper left')

# plotting data from our metric
met = fig.add_subplot(1,3,3)
wass = met.plot(arange(1, len(wasserstein)+1), 100-wasserstein,
_ = met.plot(n_iter*arange(1, len(wasserstein)+1), 100-wasserstein,
label=r'$d_W$', color='red')
# met.axis([0, len(wasserstein), 0, 100])
# met.set_xticks(arange(0,len(wasserstein),10))
detection.set_xlabel('Iteration')
detection.set_ylabel(r'Recovery rate (in %)')
met.legend(loc='lower right')
met.axis([0, len(wasserstein), 0, 100])
met.set_xticks(arange(0, n_iter*len(wasserstein)+1, n_iter))
met.set_xlabel('Iteration')
met.set_ylabel(r'Recovery rate (in %)')
met.legend(loc='upper left')

# plt.tight_layout(.5)
plt.tight_layout(.5)
plt.savefig(figname+'.png')

def _generate_testbed(kernel_init_len, n_nonzero_coefs, n_kernels,
Expand All @@ -57,8 +59,7 @@ def _generate_testbed(kernel_init_len, n_nonzero_coefs, n_kernels,
Return the dictionary, the dataset and an array indicating how atoms are combined
to obtain each sample
"""
print('Dictionary sampled from uniform distribution')
dico = [rand(kernel_init_len, n_dims) for i in range(n_kernels)]
dico = [randn(kernel_init_len, n_dims) for i in range(n_kernels)]
for i in range(len(dico)):
dico[i] /= norm(dico[i], 'fro')

Expand Down Expand Up @@ -91,7 +92,7 @@ def _generate_testbed(kernel_init_len, n_nonzero_coefs, n_kernels,
n_samples, n_dims = 1500, 1
n_features = kernel_init_len = 20
n_nonzero_coefs = 3
n_kernels, max_iter, learning_rate = 50, 25, 1.5
n_kernels, max_iter, n_iter, learning_rate = 50, 10, 5, 1.5
n_jobs, batch_size = -1, 10
detection_rate, wasserstein, objective_error = list(), list(), list()

Expand All @@ -104,24 +105,26 @@ def _generate_testbed(kernel_init_len, n_nonzero_coefs, n_kernels,
for i in range(len(dict_init)):
dict_init[i] /= norm(dict_init[i], 'fro')
learned_dict = MiniBatchMultivariateDictLearning(n_kernels=n_kernels,
batch_size=batch_size, n_iter=1,
batch_size=batch_size, n_iter=n_iter,
n_nonzero_coefs=n_nonzero_coefs,
n_jobs=n_jobs, learning_rate=learning_rate,
kernel_init_len=kernel_init_len, verbose=1,
dict_init=dict_init, random_state=rng_global)

# Update learned dictionary at each iteration and compute a distance
# with the generating dictionary
for i in range(max_iter):
learned_dict = learned_dict.partial_fit(X)
# Compute the detection rate
detection_rate.append(detectionRate(learned_dict.kernels_,
generating_dict, 0.97))
generating_dict, 0.99))
# Compute the Wasserstein distance
wasserstein.append(emd(learned_dict.kernels_, generating_dict,
'chordal', scale=True))
# Get the objective error
objective_error.append(array(learned_dict.error_ ).sum())

plot_univariate(array(objective_error), array(detection_rate),
array(wasserstein), 'univariate-case')
array(wasserstein), n_iter, 'univariate-case')

# Another possibility is to rely on a callback function such as
10 changes: 7 additions & 3 deletions mdla.py
Original file line number Diff line number Diff line change
Expand Up @@ -993,10 +993,14 @@ def multivariate_dict_learning_online(X, n_kernels=2, n_nonzero_coefs=1,
if callback is not None:
callback(locals())

if ii == 1 and verbose == 1:
print ('Expecting this learning experiment to finish in',
if ii == (iter_offset+1)*int(n_batches) and verbose == 1:
print ('Expecting this learning iterations to finish in',
(time()-t0)*(n_iter-iter_offset)/60., 'm')

# if verbose == 1:
print ('Time from begining is', time()-t0, 's, with n_iter=',
n_iter, ', iter_offset=', iter_offset,
', i.e.', n_iter-iter_offset, 'iterations to go. ii=',
ii)

# Cost function
current_cost = 0.0
Expand Down

0 comments on commit 0d8c219

Please sign in to comment.