Merge pull request #4198 from karlnapf/feature/factory
Feature/factory
karlnapf committed Mar 20, 2018
2 parents c8cc22c + 7b43b50 commit 2362d20
Showing 58 changed files with 945 additions and 305 deletions.
@@ -33,7 +33,7 @@ We then create a :sgclass:`CKernelMachine` instance, here :sgclass:`CLibSVM`, an

.. sgexample:: kernel_support_vector_machine.sg:create_instance

Then we train it on training data and apply it to test data, which here gives :sgclass:`CBinaryLabels`.
Then we train it on training data and apply it to test data. This gives :sgclass:`CLabels`, from which we can extract the label vector.

.. sgexample:: kernel_support_vector_machine.sg:train_and_apply

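As a rough orientation for readers of the updated documentation, the factory-style flow it now describes would read along the following lines in C++. This is a sketch only, not part of this commit: it assumes that the factory functions from shogun/util/factory.h (machine, kernel, features, labels, csv_file), the wrap()/Some helpers, and the string-keyed put() parameter API behave as the meta examples and generator templates in this commit suggest, and the file paths and the "labels" parameter name are placeholders.

#include <shogun/base/init.h>
#include <shogun/base/some.h>
#include <shogun/util/factory.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// Load data through the csv_file/features/labels factories (paths are placeholders).
	auto features_train = wrap(features(csv_file("features_train.dat")));
	auto features_test  = wrap(features(csv_file("features_test.dat")));
	auto labels_train   = wrap(labels(csv_file("labels_train.dat")));

	// Create the kernel and the machine by class name and configure them via put().
	auto k = wrap(kernel("GaussianKernel"));
	k->put("log_width", 1.0);

	auto svm = wrap(machine("LibSVM"));
	svm->put("C1", 1.0);
	svm->put("C2", 1.0);
	svm->put("kernel", k);
	svm->put("labels", labels_train);

	// Train on the training features and apply to the test features; the result
	// is a generic CLabels whose label vector is read back through the parameter
	// API (exposed as get_real_vector("labels") in the meta examples).
	svm->train(features_train);
	auto predictions = wrap(svm->apply(features_test));

	exit_shogun();
	return 0;
}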
2 changes: 1 addition & 1 deletion examples/meta/generator/targets/cpp.json
@@ -11,7 +11,7 @@
"Comment": "//$comment\n",
"Init": {
"Construct": "auto $name = some<C$typeName>($arguments)$kwargs",
"Copy": "auto $name = $expr$kwargs",
"Copy": "auto $name = wrap($expr)$kwargs",
"KeywordArguments": {
"List": ";\n$elements",
"Element": "$name->put(\"$keyword\", $expr)",
1 change: 1 addition & 0 deletions examples/meta/ruby/CMakeLists.txt
@@ -2,6 +2,7 @@ STRING(REGEX REPLACE "(.*)/narray.*$" "\\1" NARRAY_PATH ${NARRAY_LIB})

LIST(APPEND EXCLUDED_RUBY_META_EXAMPLES
base_api-put_get
base_api-factory
)

# add test case for each generated example
13 changes: 13 additions & 0 deletions examples/meta/src/base_api/factory.sg
@@ -0,0 +1,13 @@
Machine lib_svm = machine("LibSVM")
Machine lda = machine("LDA")
Kernel kernel_gaussian = kernel("GaussianKernel")
Kernel kernel_linear = kernel("LinearKernel")

RealMatrix real_matrix(2,2)
Features features_from_matrix = features(real_matrix)

File features_file = csv_file("../../data/classifier_binary_2d_nonlinear_features_train.dat")
Features features_from_file = features(features_file, enum EPrimitiveType.PT_FLOAT64)

File labels_file = csv_file("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
Labels labels_from_file = labels(labels_file)
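For comparison, the object-construction part of this new meta example would read roughly as follows in C++. This is a sketch under the assumption that the functions exported from shogun/util/factory.h mirror the meta API above, including the templated features() overload for dense matrices and the EPrimitiveType argument for reading typed features from a file; the file paths are placeholders.

#include <shogun/base/some.h>
#include <shogun/lib/SGMatrix.h>
#include <shogun/util/factory.h>

using namespace shogun;

void factory_demo()
{
	// Machines and kernels created by class name.
	auto lib_svm = wrap(machine("LibSVM"));
	auto kernel_gaussian = wrap(kernel("GaussianKernel"));

	// Dense features from a 2x2 real matrix (uses the features<float64_t> template).
	SGMatrix<float64_t> real_matrix(2, 2);
	auto features_from_matrix = wrap(features(real_matrix));

	// Features and labels read from CSV files.
	auto features_file = wrap(csv_file("features_train.dat"));
	auto features_from_file = wrap(features(features_file, PT_FLOAT64));

	auto labels_file = wrap(csv_file("labels_train.dat"));
	auto labels_from_file = wrap(labels(labels_file));
}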
4 changes: 0 additions & 4 deletions examples/meta/src/base_api/objects.sg

This file was deleted.

22 changes: 11 additions & 11 deletions examples/meta/src/binary/kernel_support_vector_machine.sg
@@ -1,13 +1,13 @@
CSVFile f_feats_train("../../data/classifier_binary_2d_nonlinear_features_train.dat")
CSVFile f_feats_test("../../data/classifier_binary_2d_nonlinear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_binary_2d_nonlinear_labels_test.dat")
File f_feats_train = csv_file("../../data/classifier_binary_2d_nonlinear_features_train.dat")
File f_feats_test = csv_file("../../data/classifier_binary_2d_nonlinear_features_test.dat")
File f_labels_train = csv_file("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
File f_labels_test = csv_file("../../data/classifier_binary_2d_nonlinear_labels_test.dat")

#![create_features]
RealFeatures features_train(f_feats_train)
RealFeatures features_test(f_feats_test)
BinaryLabels labels_train(f_labels_train)
BinaryLabels labels_test(f_labels_test)
Features features_train = features(f_feats_train)
Features features_test = features(f_feats_test)
Labels labels_train = labels(f_labels_train)
Labels labels_test = labels(f_labels_test)
#![create_features]

#![set_parameters]
@@ -20,7 +20,8 @@ Machine svm = machine("LibSVM", C1=1.0, C2=1.0, kernel=k, labels=labels_train, e

#![train_and_apply]
svm.train(features_train)
BinaryLabels labels_predict = svm.apply_binary(features_test)
Labels labels_predict = svm.apply(features_test)
RealVector labels = labels_predict.get_real_vector("labels")
#![train_and_apply]

#![extract_weights_bias]
@@ -33,5 +34,4 @@ AccuracyMeasure eval()
real accuracy = eval.evaluate(labels_predict, labels_test)
#![evaluate_accuracy]

# additional integration testing variables
RealVector output = labels_predict.get_real_vector("labels")

21 changes: 9 additions & 12 deletions examples/meta/src/multiclass/gaussian_naive_bayes.sg
@@ -1,23 +1,20 @@
CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat")
CSVFile f_feats_test("../../data/classifier_4class_2d_linear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_4class_2d_linear_labels_train.dat")
File f_feats_train = csv_file("../../data/classifier_4class_2d_linear_features_train.dat")
File f_feats_test = csv_file("../../data/classifier_4class_2d_linear_features_test.dat")
File f_labels_train = csv_file("../../data/classifier_4class_2d_linear_labels_train.dat")

#![create_features]
RealFeatures features_train(f_feats_train)
RealFeatures features_test(f_feats_test)
MulticlassLabels labels_train(f_labels_train)
Features features_train = features(f_feats_train)
Features features_test = features(f_feats_test)
Labels labels_train = labels(f_labels_train)
#![create_features]


#![create_instance]
GaussianNaiveBayes gnb(features_train, labels_train)
Machine gnb = machine("GaussianNaiveBayes", features=features_train, labels=labels_train)
#![create_instance]

#![train_and_apply]
gnb.train()
MulticlassLabels labels_predict = gnb.apply_multiclass(features_test)
Labels labels_predict = gnb.apply(features_test)
RealVector labels = labels_predict.get_real_vector("labels")
#![train_and_apply]


# integration testing variables
RealVector output = labels_predict.get_labels()
31 changes: 14 additions & 17 deletions examples/meta/src/regression/support_vector_regression.sg
@@ -1,39 +1,36 @@
CSVFile f_feats_train("../../data/regression_1d_sinc_features_train.dat")
CSVFile f_feats_test("../../data/regression_1d_sinc_features_test.dat")
CSVFile f_labels_train("../../data/regression_1d_sinc_labels_train.dat")
CSVFile f_labels_test("../../data/regression_1d_sinc_labels_test.dat")
File f_feats_train=csv_file("../../data/regression_1d_sinc_features_train.dat")
File f_feats_test=csv_file("../../data/regression_1d_sinc_features_test.dat")
File f_labels_train=csv_file("../../data/regression_1d_sinc_labels_train.dat")
File f_labels_test=csv_file("../../data/regression_1d_sinc_labels_test.dat")

#![create_features]
RealFeatures features_train(f_feats_train)
RealFeatures features_test(f_feats_test)
RegressionLabels labels_train(f_labels_train)
RegressionLabels labels_test(f_labels_test)
Features features_train=features(f_feats_train)
Features features_test=features(f_feats_test)
Labels labels_train=labels(f_labels_train)
Labels labels_test=labels(f_labels_test)
#![create_features]

#![create_appropriate_kernel]
real width = 1.0
GaussianKernel kernel(width)
Kernel k=kernel("GaussianKernel", log_width=1.0)
#![create_appropriate_kernel]

#![create_instance]
real svm_c = 1.0
real svr_param = 0.1
LibSVR svr(svm_c, svr_param, kernel, labels_train, enum LIBSVR_SOLVER_TYPE.LIBSVR_EPSILON_SVR)
Machine svr=machine("LibSVR", C1=1.0, C2=1.0, tube_epsilon=0.1, kernel=k, labels=labels_train)
# , solver_type=enum LIBSVR_SOLVER_TYPE.LIBSVR_EPSILON_SVR)
#![create_instance]

#![train_and_apply]
svr.train(features_train)
RegressionLabels labels_predict = svr.apply_regression(features_test)
Labels labels_predict = svr.apply(features_test)
RealVector output = labels_predict.get_real_vector("labels")
#![train_and_apply]

#![extract_alpha]
RealVector alpha = svr.get_alphas()
RealVector alpha = svr.get_real_vector("m_alpha")
#![extract_alpha]

#![evaluate_error]
MeanSquaredError eval()
real mse = eval.evaluate(labels_predict, labels_test)
#![evaluate_error]

# integration testing variables
RealVector output = labels_test.get_labels()
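The extract_alpha change above follows the same pattern: instead of the dedicated get_alphas() getter, model internals are read back through the generic string-keyed parameter interface. On the C++ side this would look roughly like the line below; the get<SGVector<float64_t>>() accessor and the "m_alpha" parameter name are assumed here from the meta example, not confirmed by this diff.

// svr created and trained as in the example above (sketch)
auto alphas = svr->get<SGVector<float64_t>>("m_alpha");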
@@ -59,6 +59,7 @@ void test_cross_validation()
lab.vector[i]=i<num_vectors/2 ? -1.0 : 1.0;

CBinaryLabels* labels=new CBinaryLabels(lab);
SG_REF(labels);

/* gaussian kernel */
int32_t kernel_cache=100;
@@ -75,6 +76,7 @@
/* train and output */
svm->train(features);
CBinaryLabels* output=CLabelsFactory::to_binary(svm->apply(features));
SG_REF(output);
for (index_t i=0; i<num_vectors; ++i)
SG_SPRINT("i=%d, class=%f,\n", i, output->get_label(i));

@@ -113,6 +115,8 @@ void test_cross_validation()
result->print_result();

/* clean up */
SG_UNREF(labels);
SG_UNREF(output)
SG_UNREF(result);
SG_UNREF(cross);
SG_UNREF(features);
@@ -122,8 +126,6 @@ int main(int argc, char **argv)
{
init_shogun(&print_message, &print_message, &print_message);

sg_io->set_loglevel(MSG_DEBUG);

test_cross_validation();

exit_shogun();
@@ -58,6 +58,7 @@ void test_cross_validation()
lab.vector[i]=i<num_vectors/2 ? -1.0 : 1.0;

CBinaryLabels* labels=new CBinaryLabels(lab);
SG_REF(labels);

/* gaussian kernel */
CGaussianKernel* kernel=new CGaussianKernel();
@@ -74,6 +75,7 @@
SG_SPRINT("starting normal training\n");
svm->train(features);
CBinaryLabels* output=CLabelsFactory::to_binary(svm->apply(features));
SG_REF(output);

/* evaluation criterion */
CContingencyTableEvaluation* eval_crit=
@@ -147,6 +149,8 @@ void test_cross_validation()
SG_SPRINT("%f sec\n", time.cur_time_diff());

/* clean up */
SG_UNREF(labels);
SG_UNREF(output);
SG_UNREF(cross);
SG_UNREF(features);
}
@@ -64,6 +64,7 @@ void test_cross_validation()
/* train and output */
krr->train(features);
CRegressionLabels* output= CLabelsFactory::to_regression(krr->apply());
SG_REF(output);
for (index_t i=0; i<num_vectors; ++i)
{
SG_SPRINT("x=%f, train=%f, predict=%f\n", train_dat.matrix[i],
@@ -90,6 +90,7 @@ void test()
SGVector<index_t>::display_vector(indices.vector, indices.vlen, "training indices");
svm->train_locked(indices);
CBinaryLabels* output=CLabelsFactory::to_binary(svm->apply());
SG_REF(output);
SGVector<float64_t>::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SGVector<float64_t>::display_vector(labels->get_labels().vector, labels->get_labels().vlen, "training labels");
SG_SPRINT("accuracy: %f\n", eval->evaluate(output, labels));
@@ -103,6 +104,7 @@
indices.vector[2]=3;
SGVector<index_t>::display_vector(indices.vector, indices.vlen, "training indices");
output=CLabelsFactory::to_binary(svm->apply());
SG_REF(output);
SGVector<float64_t>::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SGVector<float64_t>::display_vector(labels->get_labels().vector, labels->get_labels().vlen, "training labels");
SG_SPRINT("accuracy: %f\n", eval->evaluate(output, labels));
@@ -115,6 +117,7 @@
SGVector<index_t>::display_vector(indices.vector, indices.vlen, "training indices");
svm->train_locked(indices);
output=CLabelsFactory::to_binary(svm->apply());
SG_REF(output);
SGVector<float64_t>::display_vector(output->get_labels().vector, output->get_num_labels(), "apply() output");
SGVector<float64_t>::display_vector(labels->get_labels().vector, labels->get_labels().vlen, "training labels");
SG_SPRINT("accuracy: %f\n", eval->evaluate(output, labels));
@@ -125,6 +128,7 @@
svm->data_unlock();
svm->train();
output=CLabelsFactory::to_binary(svm->apply());
SG_REF(output);
ASSERT(eval->evaluate(output, labels)==1);
SGVector<float64_t>::display_vector(output->get_labels().vector, output->get_num_labels(), "output");
SGVector<float64_t>::display_vector(labels->get_labels().vector, labels->get_labels().vlen, "training labels");
4 changes: 4 additions & 0 deletions examples/undocumented/libshogun/regression_libsvr.cpp
@@ -38,7 +38,9 @@ void test_libsvr()

/* shogun representation */
CLabels* labels_train=new CRegressionLabels(lab_train);
SG_REF(labels_train);
CLabels* labels_test=new CRegressionLabels(lab_test);
SG_REF(labels_test);
CDenseFeatures<float64_t>* features_train=new CDenseFeatures<float64_t>(
feat_train);
CDenseFeatures<float64_t>* features_test=new CDenseFeatures<float64_t>(
@@ -55,6 +57,7 @@
/* predict */
CRegressionLabels* predicted_labels=CLabelsFactory::to_regression(
svm->apply(features_test));
SG_REF(predicted_labels);

/* evaluate */
CEvaluation* eval=new CMeanSquaredError();
@@ -66,6 +69,7 @@
SG_UNREF(labels_test)
SG_UNREF(predicted_labels);
SG_UNREF(svm);
SG_UNREF(labels_train);
}

int main()
2 changes: 0 additions & 2 deletions src/interfaces/swig/Kernel.i
@@ -8,8 +8,6 @@
* Copyright (C) 2009 Fraunhofer Institute FIRST and Max-Planck-Society
*/

%newobject kernel();

#ifdef HAVE_PYTHON
%feature("autodoc", "get_kernel_matrix(self) -> numpy 2dim array of float") get_kernel_matrix;
%feature("autodoc", "get_POIM2(self) -> [] of float") get_POIM2;
1 change: 0 additions & 1 deletion src/interfaces/swig/Machine.i
@@ -23,7 +23,6 @@
%newobject apply_structured(CFeatures* data);
%newobject apply_latent();
%newobject apply_latent(CFeatures* data);
%newobject machine();

#if defined(SWIGPYTHON) || defined(SWIGOCTAVE) || defined(SWIGRUBY) || defined(SWIGLUA) || defined(SWIGR)

6 changes: 6 additions & 0 deletions src/interfaces/swig/factory.i
@@ -0,0 +1,6 @@
%{
#include <shogun/util/factory.h>
%}
%include <shogun/util/factory.h>

%template(features) shogun::features<float64_t>;
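SWIG cannot export an uninstantiated function template, so the %template line above instantiates the float64_t variant of the templated features() factory for the bindings; from C++ the template is used directly. A minimal sketch, assuming a signature along the lines of template <typename T> CFeatures* features(SGMatrix<T>):

SGMatrix<float64_t> mat(2, 3);
auto dense = wrap(features(mat)); // instantiates shogun::features<float64_t>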
6 changes: 5 additions & 1 deletion src/interfaces/swig/shogun.i
@@ -115,14 +115,18 @@
%include "Boost.i"

%include "ParameterObserver.i"
%include "factory.i"

#if defined(SWIGPERL)
%include "abstract_types_extension.i"
#endif

%pragma(java) moduleimports=%{
import org.jblas.*;
%}

namespace shogun
{

%extend CSGObject
{
template <typename T, typename U= typename std::enable_if_t<std::is_arithmetic<T>::value>>
21 changes: 10 additions & 11 deletions src/shogun/base/SGObject.h
@@ -492,36 +492,35 @@ class CSGObject
*/
std::vector<std::string> parameter_names() const;

/**
* Utility method to specialize the feature to the required type.
/** Specializes a provided object to the specified type.
* Throws exception if the object cannot be specialized.
*
* @param sgo CSGObject base type
* @return The requested type if casting was successful.
* @param sgo object of CSGObject base type
* @return The requested type
*/
template<class T> static T* as(CSGObject* sgo)
{
REQUIRE(sgo, "No object provided!\n");
return sgo->as<T>();
}

/**
* Utility method to specialize the feature to the required type.
/** Specializes the object to the specified type.
* Throws exception if the object cannot be specialized.
*
* @param sgo CSGObject base type
* @return The requested type if casting was successful, or throws exception.
* @return The requested type
*/
template<class T> T* as()
{
T* c = dynamic_cast<T*>(this);
auto c = dynamic_cast<T*>(this);
if (c)
return c;

SG_SERROR("The object (%s) cannot be casted to the requested type %s!\n",
SG_SERROR(
"Object of type %s cannot be converted to type %s.\n",
demangled_type<std::remove_pointer_t<decltype(this)>>().c_str(),
demangled_type<T>().c_str());
return nullptr;
}
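A short usage sketch of the as<T>() helper documented above (the objects here are hypothetical; a failed conversion runs into the SG_SERROR path and raises an error instead of returning):

CSGObject* obj = kernel("GaussianKernel"); // e.g. created through the new factory
CKernel* k = obj->as<CKernel>();           // succeeds: the dynamic type is a kernel
// obj->as<CMachine>() would fail and report that the object cannot be converted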

#ifndef SWIG
/**
* Get parameters observable