port kernel_svm meta example to use put/get
karlnapf committed Feb 28, 2018
1 parent 87d6615 commit 3da5a3c
Showing 5 changed files with 30 additions and 22 deletions.
@@ -28,15 +28,12 @@ Imagine we have files with training and test data. We create CDenseFeatures (her

.. sgexample:: kernel_support_vector_machine.sg:create_features

In order to run :sgclass:`CLibSVM`, we need to initialize a kernel like :sgclass:`CGaussianKernel` with training features, and some parameters like :math:`C` and the optional residual convergence parameter epsilon.

.. sgexample:: kernel_support_vector_machine.sg:set_parameters

We create an instance of the :sgclass:`CLibSVM` classifier by passing it the regularization coefficient, kernel, and labels.
In order to run :sgclass:`CLibSVM`, we first need to initialize a :sgclass:`CKernel` instance, such as :sgclass:`CGaussianKernel`.
We then create a :sgclass:`CKernelMachine` instance, here :sgclass:`CLibSVM`, and provide it with parameters like the regularization coefficient :math:`C`, the kernel, the training labels, and an optional residual convergence parameter epsilon.

.. sgexample:: kernel_support_vector_machine.sg:create_instance

Then we train and apply it to test data, which here gives :sgclass:`CBinaryLabels`.
Then we train it on training data and apply it to test data, which here gives :sgclass:`CBinaryLabels`.

.. sgexample:: kernel_support_vector_machine.sg:train_and_apply
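
Taken together, the snippets pulled in by the directives above correspond to the following condensed sketch, assembled from the updated kernel_support_vector_machine.sg in this commit (loading of features_train, features_test and labels_train from file is omitted):

    Kernel k = kernel("GaussianKernel", log_width=1.0074515102711323)
    KernelMachine svm = kernel_machine("LibSVM", C1=1.0, C2=1.0, kernel=k, labels=labels_train, epsilon=0.001)
    svm.train(features_train)
    BinaryLabels labels_predict = svm.apply_binary(features_test)

Note that C1, C2, kernel, labels and epsilon are set as named parameters through the put/get framework rather than via the constructor and dedicated setters such as set_epsilon, which is the point of this port.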

15 changes: 6 additions & 9 deletions examples/meta/src/binary/kernel_support_vector_machine.sg
@@ -11,24 +11,21 @@ BinaryLabels labels_test(f_labels_test)
#![create_features]

#![set_parameters]
real C = 1.0
real epsilon = 0.001
GaussianKernel gauss_kernel(features_train, features_train, 15)
Kernel k = kernel("GaussianKernel", log_width=1.0074515102711323)
#![set_parameters]

#![create_instance]
LibSVM svm(C, gauss_kernel, labels_train)
svm.set_epsilon(epsilon)
KernelMachine svm = kernel_machine("LibSVM", C1=1.0, C2=1.0, kernel=k, labels=labels_train, epsilon=0.001)
#![create_instance]

#![train_and_apply]
svm.train()
svm.train(features_train)
BinaryLabels labels_predict = svm.apply_binary(features_test)
#![train_and_apply]

#![extract_weights_bias]
RealVector alphas = svm.get_alphas()
real b = svm.get_bias()
RealVector alphas = svm.get_real_vector("m_alpha")
real b = svm.get_real("m_bias")
#![extract_weights_bias]

#![evaluate_accuracy]
@@ -37,4 +37,4 @@ real accuracy = eval.evaluate(labels_predict, labels_test)
#![evaluate_accuracy]

# additional integration testing variables
RealVector output = labels_predict.get_labels()
RealVector output = labels_predict.get_real_vector("labels")
5 changes: 4 additions & 1 deletion src/shogun/classifier/svm/LibSVM.cpp
@@ -60,10 +60,14 @@ bool CLibSVM::train_machine(CFeatures* data)
        }
        kernel->init(data, data);
    }

    REQUIRE(kernel->get_num_vec_lhs()==m_labels->get_num_labels(),
        "Number of training data (%d) must match number of labels (%d)\n",
        kernel->get_num_vec_lhs(), m_labels->get_num_labels())

    problem.l=m_labels->get_num_labels();
    SG_INFO("%d trainlabels\n", problem.l)


    // set linear term
    if (m_linear_term.vlen>0)
    {
@@ -100,7 +104,6 @@ bool CLibSVM::train_machine(CFeatures* data)
    float64_t weights[2]={1.0,get_C2()/get_C1()};

    ASSERT(kernel && kernel->has_features())
    ASSERT(kernel->get_num_vec_lhs()==problem.l)

    switch (solver_type)
    {
15 changes: 11 additions & 4 deletions src/shogun/kernel/Kernel.cpp
@@ -925,10 +925,17 @@ void CKernel::save_serializable_post() throw (ShogunException)
void CKernel::register_params() {
    SG_ADD(&cache_size, "cache_size",
        "Cache size in MB.", MS_NOT_AVAILABLE);
    SG_ADD((CSGObject**) &lhs, "lhs",
        "Feature vectors to occur on left hand side.", MS_NOT_AVAILABLE);
    SG_ADD((CSGObject**) &rhs, "rhs",
        "Feature vectors to occur on right hand side.", MS_NOT_AVAILABLE);

    // SG_ADD((CSGObject**) &lhs, "lhs",
    //     "Feature vectors to occur on left hand side.", MS_NOT_AVAILABLE);
    m_parameters->add((CSGObject**)&lhs, "lhs", "Feature vectors to occur on left hand side.");
    watch_param("lhs", &lhs, AnyParameterProperties("Feature vectors to occur on left hand side."));

    // SG_ADD((CSGObject**) &rhs, "rhs",
    //     "Feature vectors to occur on right hand side.", MS_NOT_AVAILABLE);
    m_parameters->add((CSGObject**)&rhs, "rhs", "Feature vectors to occur on right hand side.");
    watch_param("rhs", &rhs, AnyParameterProperties("Feature vectors to occur on right hand side."));

    SG_ADD(&lhs_equals_rhs, "lhs_equals_rhs",
        "If features on lhs are the same as on rhs.", MS_NOT_AVAILABLE);
    SG_ADD(&num_lhs, "num_lhs", "Number of feature vectors on left hand side.",
8 changes: 6 additions & 2 deletions src/shogun/machine/Machine.cpp
@@ -24,8 +24,12 @@ CMachine::CMachine()
"Maximum training time.", MS_NOT_AVAILABLE);
SG_ADD((machine_int_t*) &m_solver_type, "solver_type",
"Type of solver.", MS_NOT_AVAILABLE);
SG_ADD((CSGObject**) &m_labels, "labels",
"Labels to be used.", MS_NOT_AVAILABLE);

// SG_ADD((CSGObject**) &m_labels, "labels",
// "Labels to be used.", MS_NOT_AVAILABLE);
m_parameters->add((CSGObject**)&m_labels, "labels", "Labels to be used.");
watch_param("labels", &m_labels, AnyParameterProperties("Labels to be used."));

SG_ADD(&m_store_model_features, "store_model_features",
"Should feature data of model be stored after training?", MS_NOT_AVAILABLE);
SG_ADD(&m_data_locked, "data_locked",
