Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

tag-register all raw arrays #4116

Merged
merged 1 commit into from Jan 25, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions src/shogun/classifier/mkl/MKL.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,10 @@ void CMKL::register_params()
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", MS_NOT_AVAILABLE);
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", MS_NOT_AVAILABLE);
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", MS_NOT_AVAILABLE);

m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl");
watch_param("beta_local", &beta_local, &beta_local_size);

SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", MS_NOT_AVAILABLE);
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", MS_NOT_AVAILABLE);
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", MS_NOT_AVAILABLE);
Expand Down
7 changes: 7 additions & 0 deletions src/shogun/features/PolyFeatures.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -392,14 +392,21 @@ void CPolyFeatures::register_parameters()
"multi_index",
"Flattened matrix of all multi indices that sum do the"
" degree of the polynomial kernel.");
watch_param("multi_index", &m_multi_index, &multi_index_length);

multinomial_coefficients_length=m_output_dimensions;
m_parameters->add_vector(&m_multinomial_coefficients,
&multinomial_coefficients_length, "multinomial_coefficients",
"Multinomial coefficients for all multi-indices.");
watch_param(
"multinomial_coefficients", &m_multinomial_coefficients,
&multinomial_coefficients_length);

normalization_values_length=get_num_vectors();
m_parameters->add_vector(&m_normalization_values,
&normalization_values_length, "normalization_values",
"Norm of each training example.");
watch_param(
"normalization_values", &m_normalization_values,
&normalization_values_length);
}
4 changes: 4 additions & 0 deletions src/shogun/features/SparseFeatures.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -612,6 +612,10 @@ template<class ST> void CSparseFeatures<ST>::init()
m_parameters->add_vector(&sparse_feature_matrix.sparse_matrix, &sparse_feature_matrix.num_vectors,
"sparse_feature_matrix",
"Array of sparse vectors.");
watch_param(
"sparse_feature_matrix", &sparse_feature_matrix.sparse_matrix,
&sparse_feature_matrix.num_vectors);

m_parameters->add(&sparse_feature_matrix.num_features, "sparse_feature_matrix.num_features",
"Total number of features.");
}
Expand Down
5 changes: 5 additions & 0 deletions src/shogun/features/SparsePolyFeatures.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -257,9 +257,14 @@ void CSparsePolyFeatures::init()
"Dimensions of the input space.");
m_parameters->add(&m_output_dimensions, "output_dimensions",
"Dimensions of the feature space of the polynomial kernel.");

m_normalization_values_len = get_num_vectors();
m_parameters->add_vector(&m_normalization_values, &m_normalization_values_len,
"m_normalization_values", "Norm of each training example");
watch_param(
"m_normalization_values", &m_normalization_values,
&m_normalization_values_len);

m_parameters->add(&mask, "mask", "Mask.");
m_parameters->add(&m_hash_bits, "m_hash_bits", "Number of bits in hash");
}
5 changes: 5 additions & 0 deletions src/shogun/kernel/CombinedKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -663,10 +663,15 @@ void CCombinedKernel::init()

SG_ADD((CSGObject**) &kernel_array, "kernel_array", "Array of kernels.",
MS_AVAILABLE);

m_parameters->add_vector(&sv_idx, &sv_count, "sv_idx",
"Support vector index.");
watch_param("sv_idx", &sv_idx, &sv_count);

m_parameters->add_vector(&sv_weight, &sv_count, "sv_weight",
"Support vector weights.");
watch_param("sv_weight", &sv_weight, &sv_count);

SG_ADD(&append_subkernel_weights, "append_subkernel_weights",
"If subkernel weights are appended.", MS_AVAILABLE);
SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used.",
Expand Down
4 changes: 4 additions & 0 deletions src/shogun/kernel/normalizer/DiceKernelNormalizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,12 @@ class CDiceKernelNormalizer : public CKernelNormalizer
{
m_parameters->add_vector(&diag_lhs, &num_diag_lhs, "diag_lhs",
"K(x,x) for left hand side examples.");
watch_param("diag_lhs", &diag_lhs, &num_diag_lhs);

m_parameters->add_vector(&diag_rhs, &num_diag_rhs, "diag_rhs",
"K(x,x) for right hand side examples.");
watch_param("diag_rhs", &diag_rhs, &num_diag_rhs);

SG_ADD(&use_optimized_diagonal_computation,
"use_optimized_diagonal_computation",
"flat if optimized diagonal computation is used", MS_NOT_AVAILABLE);
Expand Down
4 changes: 4 additions & 0 deletions src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,12 @@ class CSqrtDiagKernelNormalizer : public CKernelNormalizer
{
m_parameters->add_vector(&sqrtdiag_lhs, &num_sqrtdiag_lhs, "sqrtdiag_lhs",
"sqrt(K(x,x)) for left hand side examples.");
watch_param("sqrtdiag_lhs", &sqrtdiag_lhs, &num_sqrtdiag_lhs);

m_parameters->add_vector(&sqrtdiag_rhs, &num_sqrtdiag_rhs, "sqrtdiag_rhs",
"sqrt(K(x,x)) for right hand side examples.");
watch_param("sqrtdiag_rhs", &sqrtdiag_rhs, &num_sqrtdiag_rhs);

SG_ADD(&use_optimized_diagonal_computation,
"use_optimized_diagonal_computation",
"flat if optimized diagonal computation is used", MS_NOT_AVAILABLE);
Expand Down
3 changes: 3 additions & 0 deletions src/shogun/kernel/normalizer/ZeroMeanCenterKernelNormalizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,11 @@ class CZeroMeanCenterKernelNormalizer : public CKernelNormalizer
{
m_parameters->add_vector(&ktrain_row_means, &num_ktrain,
"num_ktrain", "Train row means.");
watch_param("num_ktrain", &ktrain_row_means, &num_ktrain);

m_parameters->add_vector(&ktest_row_means, &num_ktest,
"num_ktest","Test row means.");
watch_param("num_ktest", &ktest_row_means, &num_ktest);
}

/** default destructor */
Expand Down
16 changes: 16 additions & 0 deletions src/shogun/kernel/string/HistogramWordStringKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -410,14 +410,30 @@ void CHistogramWordStringKernel::init()

SG_ADD(&initialized, "initialized", "If kernel is initalized.",
MS_NOT_AVAILABLE);

m_parameters->add_vector(&plo_lhs, &num_lhs, "plo_lhs");
watch_param("plo_lhs", &plo_lhs, &num_lhs);

m_parameters->add_vector(&plo_rhs, &num_rhs, "plo_rhs");
watch_param("plo_rhs", &plo_rhs, &num_rhs);

m_parameters->add_vector(&ld_mean_lhs, &num_lhs, "ld_mean_lhs");
watch_param("ld_mean_lhs", &ld_mean_lhs, &num_lhs);

m_parameters->add_vector(&ld_mean_rhs, &num_rhs, "ld_mean_rhs");
watch_param("ld_mean_rhs", &ld_mean_rhs, &num_rhs);

m_parameters->add_vector(&sqrtdiag_lhs, &num_lhs, "sqrtdiag_lhs");
watch_param("sqrtdiag_lhs", &sqrtdiag_lhs, &num_lhs);

m_parameters->add_vector(&sqrtdiag_rhs, &num_rhs, "sqrtdiag_rhs");
watch_param("sqrtdiag_rhs", &sqrtdiag_rhs, &num_rhs);

m_parameters->add_vector(&mean, &num_params2, "mean");
watch_param("mean", &mean, &num_params2);

m_parameters->add_vector(&variance, &num_params2, "variance");
watch_param("variance", &variance, &num_params2);

SG_ADD((CSGObject**) &estimate, "estimate", "Plugin Estimate.",
MS_NOT_AVAILABLE);
Expand Down
4 changes: 4 additions & 0 deletions src/shogun/kernel/string/SNPStringKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -181,8 +181,12 @@ void CSNPStringKernel::register_params()
SG_ADD(&m_win_len, "m_win_len", "the window length", MS_AVAILABLE);
SG_ADD(&m_inhomogene, "m_inhomogene",
"the mark of whether it's an inhomogeneous poly kernel", MS_NOT_AVAILABLE);

m_parameters->add_vector(&m_str_min, &m_str_len, "m_str_min", "allele A");
watch_param("m_str_min", &m_str_min, &m_str_len);

m_parameters->add_vector(&m_str_maj, &m_str_len, "m_str_maj", "allele B");
watch_param("m_str_maj", &m_str_maj, &m_str_len);
}

void CSNPStringKernel::init()
Expand Down
3 changes: 3 additions & 0 deletions src/shogun/kernel/string/SpectrumRBFKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -383,7 +383,10 @@ void CSpectrumRBFKernel::register_param()
SG_ADD(&width, "width", "width of Gaussian", MS_AVAILABLE);
SG_ADD(&nof_sequences, "nof_sequences", "length of the sequence",
MS_NOT_AVAILABLE);

m_parameters->add_vector(&sequences, &nof_sequences, "sequences", "the sequences as a part of profile");
watch_param("sequences", &sequences, &nof_sequences);

SG_ADD(&max_sequence_length,
"max_sequence_length", "max length of the sequence", MS_NOT_AVAILABLE);
}
Expand Down
1 change: 1 addition & 0 deletions src/shogun/kernel/string/WeightedCommWordStringKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -312,4 +312,5 @@ void CWeightedCommWordStringKernel::init()

m_parameters->add_vector(&weights, &degree, "weights",
"weights for each of the subkernels of degree 1...d");
watch_param("weights", &weights, &degree);
}
12 changes: 12 additions & 0 deletions src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1939,15 +1939,27 @@ void CWeightedDegreePositionStringKernel::init()
m_parameters->add_vector(&position_weights, &position_weights_len,
"position_weights",
"Weights per position.");
watch_param("position_weights", &position_weights, &position_weights_len);

m_parameters->add_vector(&position_weights_lhs, &position_weights_lhs_len,
"position_weights_lhs",
"Weights per position left hand side.");
watch_param(
"position_weights_lhs", &position_weights_lhs,
&position_weights_lhs_len);

m_parameters->add_vector(&position_weights_rhs, &position_weights_rhs_len,
"position_weights_rhs",
"Weights per position right hand side.");
watch_param(
"position_weights_rhs", &position_weights_rhs,
&position_weights_rhs_len);

m_parameters->add_vector(&shift, &shift_len,
"shift",
"Shift Vector.");
watch_param("shift", &shift, &shift_len);

SG_ADD(&max_shift, "max_shift", "Maximal shift.", MS_AVAILABLE);
SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", MS_AVAILABLE);
SG_ADD(&degree, "degree", "Order of WD kernel.", MS_AVAILABLE);
Expand Down
2 changes: 2 additions & 0 deletions src/shogun/kernel/string/WeightedDegreeStringKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1015,6 +1015,8 @@ void CWeightedDegreeStringKernel::init()
m_parameters->add_vector(&position_weights, &position_weights_len,
"position_weights",
"Weights per position.");
watch_param("position_weights", &position_weights, &position_weights_len);

SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", MS_AVAILABLE);
SG_ADD(&degree, "degree", "Order of WD kernel.", MS_AVAILABLE);
SG_ADD(&max_mismatch, "max_mismatch",
Expand Down
16 changes: 9 additions & 7 deletions src/shogun/labels/MultilabelLabels.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -76,13 +76,15 @@ CMultilabelLabels::init(int32_t num_labels, int32_t num_classes)

// Can only be enabled after this issue has been solved:
// https://github.com/shogun-toolbox/shogun/issues/1972
/* this->m_parameters->add(&m_num_labels, "m_num_labels",
"Number of labels.");
this->m_parameters->add(&m_num_classes, "m_num_classes",
"Number of classes.");
this->m_parameters->add_vector(&m_labels, &m_num_labels, "labels_array",
"The label vectors for all (num_labels) outputs.");
*/
/* this->m_parameters->add(&m_num_labels, "m_num_labels",
"Number of labels.");
this->m_parameters->add(&m_num_classes, "m_num_classes",
"Number of classes.");

this->m_parameters->add_vector(&m_labels, &m_num_labels, "labels_array",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think you don't need to change anything here.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I will undo the changes.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I removed the tag-register call here; by mistake I didn't remove the new line. I hope that's OK.

"The label vectors for all (num_labels)
outputs.");
*/

m_num_labels = num_labels;
m_num_classes = num_classes;
Expand Down
2 changes: 2 additions & 0 deletions src/shogun/lib/DynamicArray.h
Original file line number Diff line number Diff line change
Expand Up @@ -623,6 +623,8 @@ template <class T> class CDynamicArray :public CSGObject
m_parameters->add_vector(&m_array.array,
&m_array.current_num_elements, "array",
"Memory for dynamic array.");
watch_param("array", &m_array.array, &m_array.current_num_elements);

SG_ADD(&m_array.resize_granularity,
"resize_granularity",
"shrink/grow step size.", MS_NOT_AVAILABLE);
Expand Down
4 changes: 4 additions & 0 deletions src/shogun/mathematics/linalg/linop/SparseMatrixOperator.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ void CSparseMatrixOperator<T>::init()
this->m_parameters->add_vector(&m_operator.sparse_matrix,
&m_operator.num_vectors, "sparse_matrix",
"The sparse matrix of the linear operator.");
this->watch_param(
"sparse_matrix", &m_operator.sparse_matrix,
&m_operator.num_vectors);

this->m_parameters->add(&m_operator.num_features,
"m_operator.num_features", "Number of features.");
}
Expand Down
4 changes: 4 additions & 0 deletions src/shogun/modelselection/ModelSelectionParameters.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,8 @@ CParameterCombination* CModelSelectionParameters::get_single_combination(
(param_vect)[j] = ((float64_t*)m_values)[i];
}
p->add_vector(&param_vect, m_vector_length, m_node_name);
watch_param(m_node_name, &param_vect, m_vector_length);

break;
}
case MSPT_INT32_SGVECTOR:
Expand All @@ -255,6 +257,8 @@ CParameterCombination* CModelSelectionParameters::get_single_combination(
(param_vect)[j] = ((int32_t*)m_values)[i];
}
p->add_vector(&param_vect, m_vector_length, m_node_name);
watch_param(m_node_name, &param_vect, m_vector_length);

break;
}
case MSPT_FLOAT64:
Expand Down
5 changes: 5 additions & 0 deletions src/shogun/multiclass/ScatterSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,13 @@ CScatterSVM::~CScatterSVM()
void CScatterSVM::register_params()
{
SG_ADD((machine_int_t*) &scatter_type, "scatter_type", "Type of scatter SVM", MS_NOT_AVAILABLE);

m_parameters->add_vector(&norm_wc, &norm_wc_len, "norm_wc", "Norm of w_c");
watch_param("norm_wc", &norm_wc, &norm_wc_len);

m_parameters->add_vector(&norm_wcw, &norm_wcw_len, "norm_wcw", "Norm of w_cw");
watch_param("norm_wcw", &norm_wcw, &norm_wcw_len);

SG_ADD(&rho, "rho", "Scatter SVM rho", MS_NOT_AVAILABLE);
SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", MS_NOT_AVAILABLE);
}
Expand Down
7 changes: 7 additions & 0 deletions src/shogun/preprocessor/RandomFourierGaussPreproc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,10 @@ CRandomFourierGaussPreproc::CRandomFourierGaussPreproc() :
SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", MS_AVAILABLE);

m_parameters->add_vector(&randomcoeff_additive,&cur_dim_feature_space,"randomcoeff_additive");
watch_param(
"randomcoeff_additive", &randomcoeff_additive,
&cur_dim_feature_space);

m_parameters->add_matrix(&randomcoeff_multiplicative,&cur_dim_feature_space,&cur_dim_input_space,"randomcoeff_multiplicative");
watch_param(
"randomcoeff_multiplicative", &randomcoeff_multiplicative,
Expand Down Expand Up @@ -120,6 +124,9 @@ CRandomFourierGaussPreproc::CRandomFourierGaussPreproc(
SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", MS_AVAILABLE);

m_parameters->add_vector(&randomcoeff_additive,&cur_dim_feature_space,"randomcoeff_additive");
watch_param(
"randomcoeff_additive", &randomcoeff_additive,
&cur_dim_feature_space);

m_parameters->add_matrix(&randomcoeff_multiplicative,&cur_dim_feature_space,&cur_dim_input_space,"randomcoeff_multiplicative");
watch_param(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,7 @@ class CMultitaskKernelPlifNormalizer: public CMultitaskKernelMklNormalizer
{
SG_ADD(&num_tasks, "num_tasks", "the number of tasks", MS_NOT_AVAILABLE);
SG_ADD(&num_betas, "num_betas", "the number of weights", MS_NOT_AVAILABLE);

m_parameters->add_vector((SGString<float64_t>**)&distance_matrix, &num_tasksqr, "distance_matrix", "distance between tasks");
m_parameters->add_vector((SGString<float64_t>**)&similarity_matrix, &num_tasksqr, "similarity_matrix", "similarity between tasks");
m_parameters->add_vector((SGString<float64_t>**)&betas, &num_betas, "num_betas", "weights");
Expand Down