Skip to content

Commit

Permalink
Be more explicit in Some
Browse files (browse the repository at this point in the history)
  • Loading branch information
lisitsyn committed Apr 15, 2018
1 parent 844249b commit ceb83d8
Show file tree
Hide file tree
Showing 10 changed files with 126 additions and 57 deletions.
Expand Up @@ -18,7 +18,6 @@ Features converted = ica.apply(features)

#![extract]
RealMatrix mixing_matrix = ica.get_mixing_matrix()
RealFeatures casted = RealFeatures:obtain_from_generic(converted)
RealMatrix unmixed_signal = casted.get_feature_matrix()
RealMatrix unmixed_signal = converted.get_real_matrix("feature_matrix")
#![extract]

Expand Up @@ -41,22 +41,21 @@ cross.subscribe_to_parameters(mkl_obs)
#[!create_observer]

#![evaluate_and_get_result]
CrossValidationResult result()
result = CrossValidationResult:obtain_from_generic(cross.evaluate())
CrossValidationResult result = cross.evaluate()
#![evaluate_and_get_result]

#![get_results]
real mean = result.get_mean()
real stddev = result.get_std_dev()
real mean = result.get_real("mean")
real stddev = result.get_real("std_dev")
#![get_results]

#![get_fold_machine]
CrossValidationStorage obs = mkl_obs.get_observation(0)
CrossValidationFoldStorage fold = obs.get_fold(0)
MKLClassification machine = MKLClassification:obtain_from_generic(fold.get_trained_machine())
MKLClassification machine = fold.get_trained_machine()
#![get_fold_machine]

#![get_weights]
CombinedKernel k = CombinedKernel:obtain_from_generic(machine.get_kernel())
CombinedKernel k = machine.get("kernel")
RealVector w = k.get_subkernel_weights()
#![get_weights]
#![get_weights]
10 changes: 5 additions & 5 deletions examples/meta/src/neural_nets/feedforward_net_classification.sg
Expand Up @@ -14,10 +14,10 @@ BinaryLabels labels_test(f_labels_test)

#![add_layers]
int num_feats = features_train.get_num_features()
NeuralLayers layers()
layers = layers.input(num_feats)
layers = layers.rectified_linear(10)
layers = layers.softmax(2)
NeuralLayers layers()
layers.input(num_feats)
layers.rectified_linear(10)
layers.softmax(2)
DynamicObjectArray all_layers = layers.done()
#![add_layers]

Expand Down Expand Up @@ -52,4 +52,4 @@ real accuracy = am.evaluate(labels_predict, labels_test)
#![evaluate_accuracy]

# additional integration testing variables
RealVector output = labels_predict.get_labels()
RealVector output = labels_predict.get_labels()
8 changes: 4 additions & 4 deletions examples/meta/src/neural_nets/feedforward_net_regression.sg
Expand Up @@ -14,10 +14,10 @@ RegressionLabels labels_test(f_labels_test)

#![add_layers]
int dimensions = features_train.get_num_features()
NeuralLayers layers()
layers = layers.input(dimensions)
layers = layers.rectified_linear(20)
layers = layers.linear(1)
NeuralLayers layers()
layers.input(dimensions)
layers.rectified_linear(20)
layers.linear(1)
DynamicObjectArray all_layers = layers.done()
#![add_layers]

Expand Down
129 changes: 100 additions & 29 deletions src/shogun/base/some.h
Expand Up @@ -13,16 +13,26 @@ namespace shogun
* the count and once deleted this wrapper decreases the counter.
*
*/
template <typename T>
template <class T>
class Some
{
public:
Some(const Some<T>& other);
template <typename R>
template <class R>
Some(const Some<R>& other);
explicit Some(T* other);

Some& operator=(T* other);
Some(Some<T>&& other);
template <class R>
Some(Some<R>&& other);

Some<T>& operator=(const Some<T>& other);
template <class R>
Some<T>& operator=(const Some<R>& other);

Some<T>& operator=(Some<T>&& other);
template <class R>
Some<T>& operator=(Some<R>&& other);

~Some();

static Some<T> from_raw(T* raw);
Expand All @@ -34,6 +44,7 @@ namespace shogun
* @return raw pointer (without SG_REF)
*/
operator T*() const;

/** Call member function or access member of T
*
* @return raw pointer (without SG_REF)
Expand All @@ -59,69 +70,115 @@ namespace shogun
*/
T* get() const;

void reset(T* value = nullptr);

private:
Some(T* other);
Some();
void unref();
void ref();

private:
T* raw = nullptr;
T* raw;
};

template <typename T>
// Default constructor: creates an empty wrapper that holds no object.
template <class T>
Some<T>::Some() : raw(nullptr)
{
}
template <typename T>
Some<T>::Some(const Some<T>& other) : raw(other.raw)
// Wrap a raw pointer and take shared ownership: ref() bumps the held
// object's intrusive reference count (a no-op when `other` is null).
template <class T>
Some<T>::Some(T* other) : raw(other)
{
ref();
}
template <typename T>
Some<T>::Some(T* other) : raw(other)
// Converting copy constructor from Some<R>. The held R* is downcast to T*
// with dynamic_cast; on a failed cast this wrapper ends up holding nullptr
// (and ref() is then a no-op). reset() releases the (null) initial state and
// stores the cast result; ref() then adds this wrapper's own reference.
template <class T>
template <class R>
Some<T>::Some(const Some<R>& other) : raw(nullptr)
{
reset(dynamic_cast<T*>(other.get()));
ref();
}
template <class T>
template <class R>
Some<T>::Some(const Some<R>& other)
Some<T>::Some(const Some<T>& other) : raw(other.get())
{
raw = dynamic_cast<T*>(other.get());
ref();
}
template <typename T>
Some<T>& Some<T>::operator=(T* other)
// Converting move constructor from Some<R>: steals `other`'s reference
// (note: no ref() call) after downcasting with dynamic_cast, then disarms
// `other` so its destructor won't unref.
// NOTE(review): if the dynamic_cast fails, other.raw is still nulled without
// an unref, which would leak one reference — confirm the R -> T cast cannot
// fail for the intended uses, or unref `other` on cast failure.
template <class T>
template <class R>
Some<T>::Some(Some<R>&& other) : raw(nullptr)
{
reset(dynamic_cast<T*>(other.get()));
other.raw = nullptr;
}
// Move constructor: takes over `other`'s pointer together with its
// reference-count contribution (no ref()); `other` is left empty so its
// destructor won't unref the object.
template <class T>
Some<T>::Some(Some<T>&& other) : raw(other.get())
{
other.raw = nullptr;
}
/** Converting copy assignment from Some<R>.
 *
 * Replaces the held object with the one held by `other`, downcasting
 * R* -> T* with dynamic_cast for consistency with the converting copy
 * constructor Some<T>(const Some<R>&) (which also casts); a failed cast
 * leaves this wrapper holding nullptr. reset() releases the previously
 * held object and ref() then adds this wrapper's reference (no-op on
 * nullptr). Assigning a wrapper that holds the same object is a no-op.
 */
template <class T>
template <class R>
Some<T>& Some<T>::operator=(const Some<R>& other)
{
if (get() != other.get())
{
// dynamic_cast mirrors the converting copy constructor so both
// conversion paths agree on downcast behavior.
reset(dynamic_cast<T*>(other.get()));
ref();
}
return *this;
}
template <class T>
Some<T>& Some<T>::operator=(const Some<T>& other)
{
if (raw != other)
if (get() != other.get())
{
unref();
raw = other;
reset(other.get());
ref();
}
return *this;
}
/** Converting move assignment from Some<R>.
 *
 * Steals `other`'s reference: reset() releases the currently held object,
 * the incoming pointer is stored without an extra ref(), and `other` is
 * disarmed so its destructor won't unref. The R* -> T* downcast uses
 * dynamic_cast for consistency with the converting move constructor
 * Some<T>(Some<R>&&).
 * NOTE(review): as in that constructor, a failed cast still nulls
 * other.raw, leaking one reference — confirm the cast cannot fail for the
 * intended uses.
 */
template <class T>
template <class R>
Some<T>& Some<T>::operator=(Some<R>&& other)
{
if (get() != other.get())
{
reset(dynamic_cast<T*>(other.get()));
other.raw = nullptr;
}
return *this;
}
// Move assignment: when the two wrappers hold different objects, release
// the current one (reset() unrefs it) and steal `other`'s reference without
// an extra ref(); `other` is disarmed so its destructor won't unref.
// Self-move (same held object) is a no-op.
template <class T>
Some<T>& Some<T>::operator=(Some<T>&& other)
{
if (get() != other.get())
{
reset(other.get());
other.raw = nullptr;
}
return *this;
}

template <typename T>
template <class T>
Some<T>::~Some()
{
unref();
reset();
}
template <typename T>
Some<T>::operator T*() const
{
return raw;
return get();
}
template <typename T>
template <class T>
T* Some<T>::operator->() const
{
return raw;
return get();
}
template <typename T>
// Two wrappers compare equal exactly when they hold the same raw pointer.
template <class T>
bool Some<T>::operator==(const Some<T>& other) const
{
return other.raw == this->raw;
}
template <typename T>
template <class T>
bool Some<T>::operator!=(const Some<T>& other) const
{
return !((*this) == other);
Expand All @@ -131,13 +188,21 @@ namespace shogun
{
return raw;
}
template <typename T>
// Replace the held pointer: unref() releases the currently held object
// first, then the new pointer is stored WITHOUT taking a reference —
// callers that need shared ownership must call ref() afterwards.
template <class T>
void Some<T>::reset(T* ptr)
{
unref();
raw = ptr;
}
// Add this wrapper's reference to the held object's intrusive count;
// nothing to do when no object is held.
template <class T>
void Some<T>::ref()
{
if (raw != nullptr)
raw->ref();
}
template <typename T>
template <class T>
void Some<T>::unref()
{
if (raw)
Expand All @@ -146,7 +211,7 @@ namespace shogun
(raw) = NULL;
};
}
template <typename T>
template <class T>
Some<T> Some<T>::from_raw(T* raw)
{
Some<T> result(raw);
Expand All @@ -163,13 +228,19 @@ namespace shogun
* @return a shared pointer that holds created instance of @ref T
*
*/
template <typename T, class... Args>
/** Creates a new instance of T wrapped in a Some<T> shared-ownership
 * wrapper, perfectly forwarding the constructor arguments.
 *
 * Fix: the arguments were expanded as plain `args...`, which always passes
 * them as lvalues — rvalue arguments were silently copied instead of moved.
 * `static_cast<Args&&>(args)...` is exactly what std::forward does for a
 * deduced parameter pack (reference collapsing) and requires no <utility>.
 *
 * @param args arguments forwarded to T's constructor
 * @return a Some<T> owning the newly created instance of T
 */
template <class T, class... Args>
Some<T> some(Args&&... args)
{
T* ptr = new T(static_cast<Args&&>(args)...);
return Some<T>::from_raw(ptr);
}

// Convenience factory: a Some<T> that holds no object at all.
template <class T>
Some<T> empty()
{
auto result = Some<T>::from_raw(nullptr);
return result;
}

template <class T>
SG_FORCED_INLINE T wrap(const T& value)
{
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/base/SGObject_unittest.cc
Expand Up @@ -534,6 +534,6 @@ TEST(SGObject, watched_parameter_object)
obj->put(Tag<CMockObject*>("watched_object"), other_obj.get());
EXPECT_EQ(other_obj->ref_count(), 2);
EXPECT_FALSE(other_obj->equals(obj));
obj = nullptr;
obj = empty<CMockObject>();
EXPECT_EQ(other_obj->ref_count(), 1);
}
8 changes: 4 additions & 4 deletions tests/unit/base/Some_unittest.cc
Expand Up @@ -36,9 +36,9 @@ TEST(Some,reassignment)
CGaussianKernel* raw = new CGaussianKernel();
EXPECT_EQ(1, kernel->ref_count());
EXPECT_EQ(0, raw->ref_count());
kernel = raw;
EXPECT_TRUE(kernel->equals(raw));
EXPECT_EQ(1, kernel->ref_count());
kernel.reset(raw);
EXPECT_TRUE(kernel->equals(raw));
EXPECT_EQ(0, kernel->ref_count());
}

TEST(Some,self_assignment)
Expand All @@ -63,7 +63,7 @@ TEST(Some, get_method)
{
auto raw = new CGaussianKernel();
SG_REF(raw);
auto kernel = Some<CGaussianKernel>(raw);
auto kernel = wrap(raw);
EXPECT_TRUE(raw == kernel.get());
EXPECT_EQ(2, raw->ref_count());
SG_UNREF(raw);
Expand Down
Expand Up @@ -165,15 +165,15 @@ TEST_F(TrainedModelSerializationTest, {{test_name}})

machine->train();

auto predictions=Some<CLabels>(machine->apply(test_feats));
auto predictions=wrap<CLabels>(machine->apply(test_feats));

std::string filename;
ASSERT_TRUE(serialize_machine(machine, filename, {{store_model_features}}));

auto deserialized_machine=some<{{class}}>();
ASSERT_TRUE(deserialize_machine(deserialized_machine, filename));

auto deserialized_predictions=Some<CLabels>(deserialized_machine->apply(test_feats));
auto deserialized_predictions=wrap<CLabels>(deserialized_machine->apply(test_feats));

// allow for lossy serialization format
set_global_fequals_epsilon(1e-6);
Expand All @@ -195,4 +195,4 @@ set macros = {
#include <{{attrs['include']}}>
{{ macros[b](name) }}
{% endfor %}
{% endfor %}
{% endfor %}
2 changes: 1 addition & 1 deletion tests/unit/labels/MulticlassLabels_unittest.cc
Expand Up @@ -82,7 +82,7 @@ TEST_F(MulticlassLabels, multiclass_labels_from_dense)
labels->set_labels(labels_true);
auto labels2 = multiclass_labels(labels);
EXPECT_NE(labels, labels2);
ASSERT_NE(labels2, nullptr);
ASSERT_NE(labels2.get(), nullptr);
EXPECT_EQ(labels->get_labels(), labels2->get_labels());
}

Expand Down
2 changes: 1 addition & 1 deletion tests/unit/labels/RegressionLabels_unittest.cc
Expand Up @@ -43,7 +43,7 @@ TEST_F(RegressionLabels, regression_labels_from_dense)

auto labels2 = regression_labels(labels);
EXPECT_NE(labels, labels2);
ASSERT_NE(labels2, nullptr);
ASSERT_NE(labels2.get(), nullptr);
EXPECT_EQ(labels->get_labels(), labels2->get_labels());
EXPECT_EQ(labels->get_values(), labels2->get_values());
}

0 comments on commit ceb83d8

Please sign in to comment.