Skip to content

Commit

Permalink
Port kernel svm example to using features/labels factory
Browse files Browse the repository at this point in the history
Includes porting LibSVM to the new CLabels::as_binary()
  • Loading branch information
karlnapf committed Mar 19, 2018
1 parent e789605 commit c85fedd
Show file tree
Hide file tree
Showing 6 changed files with 24 additions and 28 deletions.
2 changes: 1 addition & 1 deletion data
Expand Up @@ -33,7 +33,7 @@ We then create a :sgclass:`CKernelMachine` instance, here :sgclass:`CLibSVM`, an

.. sgexample:: kernel_support_vector_machine.sg:create_instance

Then we train it on training data and apply it to test data, which here gives :sgclass:`CBinaryLabels`.
Then we train it on training data and apply it to test data. This gives :sgclass:`CLabels`, from which we can extract the label vector.

.. sgexample:: kernel_support_vector_machine.sg:train_and_apply

Expand Down
2 changes: 1 addition & 1 deletion examples/meta/generator/targets/cpp.json
Expand Up @@ -11,7 +11,7 @@
"Comment": "//$comment\n",
"Init": {
"Construct": "auto $name = some<C$typeName>($arguments)$kwargs",
"Copy": "auto $name = $expr$kwargs",
"Copy": "auto $name = wrap($expr)$kwargs",
"KeywordArguments": {
"List": ";\n$elements",
"Element": "$name->put(\"$keyword\", $expr)",
Expand Down
22 changes: 11 additions & 11 deletions examples/meta/src/binary/kernel_support_vector_machine.sg
@@ -1,13 +1,13 @@
CSVFile f_feats_train("../../data/classifier_binary_2d_nonlinear_features_train.dat")
CSVFile f_feats_test("../../data/classifier_binary_2d_nonlinear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_binary_2d_nonlinear_labels_test.dat")
File f_feats_train = csv_file("../../data/classifier_binary_2d_nonlinear_features_train.dat")
File f_feats_test = csv_file("../../data/classifier_binary_2d_nonlinear_features_test.dat")
File f_labels_train = csv_file("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
File f_labels_test = csv_file("../../data/classifier_binary_2d_nonlinear_labels_test.dat")

#![create_features]
RealFeatures features_train(f_feats_train)
RealFeatures features_test(f_feats_test)
BinaryLabels labels_train(f_labels_train)
BinaryLabels labels_test(f_labels_test)
Features features_train = features(f_feats_train)
Features features_test = features(f_feats_test)
Labels labels_train = labels(f_labels_train)
Labels labels_test = labels(f_labels_test)
#![create_features]

#![set_parameters]
Expand All @@ -20,7 +20,8 @@ Machine svm = machine("LibSVM", C1=1.0, C2=1.0, kernel=k, labels=labels_train, e

#![train_and_apply]
svm.train(features_train)
BinaryLabels labels_predict = svm.apply_binary(features_test)
Labels labels_predict = svm.apply(features_test)
RealVector labels = labels_predict.get_real_vector("labels")
#![train_and_apply]

#![extract_weights_bias]
Expand All @@ -33,5 +34,4 @@ AccuracyMeasure eval()
real accuracy = eval.evaluate(labels_predict, labels_test)
#![evaluate_accuracy]

# additional integration testing variables
RealVector output = labels_predict.get_real_vector("labels")

4 changes: 2 additions & 2 deletions src/shogun/classifier/svm/LibSVM.cpp
Expand Up @@ -48,7 +48,7 @@ bool CLibSVM::train_machine(CFeatures* data)
struct svm_node* x_space;

ASSERT(m_labels && m_labels->get_num_labels())
ASSERT(m_labels->get_label_type() == LT_BINARY)
auto binary_labels = m_labels->as_binary();

if (data)
{
Expand Down Expand Up @@ -94,7 +94,7 @@ bool CLibSVM::train_machine(CFeatures* data)

for (int32_t i=0; i<problem.l; i++)
{
problem.y[i]=((CBinaryLabels*) m_labels)->get_label(i);
problem.y[i]=binary_labels->get_label(i);
problem.x[i]=&x_space[2*i];
x_space[2*i].index=i;
x_space[2*i+1].index=-1;
Expand Down
20 changes: 8 additions & 12 deletions src/shogun/evaluation/ContingencyTableEvaluation.cpp
Expand Up @@ -12,15 +12,20 @@ using namespace shogun;

float64_t CContingencyTableEvaluation::evaluate(CLabels* predicted, CLabels* ground_truth)
{
ASSERT(predicted->get_label_type()==LT_BINARY)
ASSERT(ground_truth->get_label_type()==LT_BINARY)
REQUIRE(predicted->get_num_labels() == ground_truth->get_num_labels(),
"Number of predicted labels (%d) must be "
"equal to number of ground truth labels (%d)!\n", get_name(),
predicted->get_num_labels(), ground_truth->get_num_labels());

auto predicted_binary = predicted->as_binary();
auto ground_truth_binary = ground_truth->as_binary();

/* commented out: what if a machine only returns +1 in apply() ??
 * Heiko Strathmann */
// predicted->ensure_valid();

ground_truth->ensure_valid();
compute_scores((CBinaryLabels*)predicted,(CBinaryLabels*)ground_truth);
compute_scores(predicted_binary.get(),ground_truth_binary.get());
switch (m_type)
{
case ACCURACY:
Expand Down Expand Up @@ -82,15 +87,6 @@ EEvaluationDirection CContingencyTableEvaluation::get_evaluation_direction() con

void CContingencyTableEvaluation::compute_scores(CBinaryLabels* predicted, CBinaryLabels* ground_truth)
{
ASSERT(ground_truth->get_label_type() == LT_BINARY)
ASSERT(predicted->get_label_type() == LT_BINARY)

if (predicted->get_num_labels()!=ground_truth->get_num_labels())
{
SG_ERROR("%s::compute_scores(): Number of predicted labels (%d) is not "
"equal to number of ground truth labels (%d)!\n", get_name(),
predicted->get_num_labels(), ground_truth->get_num_labels());
}
m_TP = 0.0;
m_FP = 0.0;
m_TN = 0.0;
Expand Down

0 comments on commit c85fedd

Please sign in to comment.