remove random functions in CMath #3906

Merged
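This PR replaces the static CMath::random(), CMath::randn_double(), CMath::normal_random() and CMath::init_random() helpers with C++11 <random> distributions drawn from an engine obtained via get_prng() and seeded through set_global_seed(); in the meta examples, Math:init_random(1) becomes set_global_seed(1). As a minimal standalone sketch of the pattern, with std::mt19937_64 standing in for whatever engine get_prng() returns (its exact type is not visible in this diff):

    // Sketch of the migration pattern used throughout this PR.
    #include <cstdint>
    #include <random>

    int main()
    {
        // Old style (removed in this PR):
        //   CMath::init_random(17);
        //   int32_t coin = CMath::random(0, 1);
        //   double  g    = CMath::randn_double();

        // New style: seed an engine once, then sample from <random> distributions.
        std::mt19937_64 prng(17); // in shogun: set_global_seed(17); auto prng = get_prng();
        std::uniform_int_distribution<int32_t> coin_dist(0, 1); // replaces CMath::random(0, 1)
        std::normal_distribution<double> gauss(0.0, 1.0);       // replaces CMath::randn_double()

        int32_t coin = coin_dist(prng);
        double g = gauss(prng);
        (void)coin;
        (void)g;
        return 0;
    }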
6 changes: 3 additions & 3 deletions benchmarks/hasheddoc_benchmarks.cpp
@@ -13,7 +13,6 @@
#include <shogun/features/HashedDocDotFeatures.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/lib/NGramTokenizer.h>
#include <shogun/mathematics/Math.h>

using namespace shogun;

@@ -27,13 +26,14 @@ int main(int argv, char** argc)
int32_t num_strings = 5000;
int32_t max_str_length = 10000;
SGStringList<char> string_list(num_strings, max_str_length);

auto prng = get_prng();
std::uniform_int_distribution<index_t> dist('A', 'Z');
SG_SPRINT("Creating features...\n");
for (index_t i=0; i<num_strings; i++)
{
string_list.strings[i] = SGString<char>(max_str_length);
for (index_t j=0; j<max_str_length; j++)
string_list.strings[i].string[j] = (char) CMath::random('A', 'Z');
string_list.strings[i].string[j] = (char)dist(prng);
}
SG_SPRINT("Features were created.\n");

4 changes: 3 additions & 1 deletion benchmarks/rf_feats_benchmark.cpp
@@ -16,6 +16,8 @@ int main(int argv, char** argc)

int32_t dims[] = {100, 300, 600};
CTime* timer = new CTime();
auto prng = get_prng();
std::uniform_int_distribution<int32_t> dist(0, 1);
for (index_t d=0; d<3; d++)
{
int32_t num_dim = dims[d];
@@ -27,7 +29,7 @@
{
for (index_t j=0; j<num_dim; j++)
{
mat(j,i) = CMath::random(0,1) + 0.5;
mat(j, i) = dist(prng) + 0.5;
}
}

6 changes: 4 additions & 2 deletions benchmarks/rf_feats_kernel_comp.cpp
@@ -29,6 +29,8 @@ int main(int argv, char** argc)
float64_t lin_C = 0.1;
float64_t non_lin_C = 0.1;
CPRCEvaluation* evaluator = new CPRCEvaluation();
auto prng = get_prng();
std::uniform_int_distribution<int32_t> dist(0, 1);
CSqrtDiagKernelNormalizer* normalizer = new CSqrtDiagKernelNormalizer(true);
SG_REF(normalizer);
for (index_t d=0; d<4; d++)
@@ -48,12 +50,12 @@
if ((i+j)%2==0)
{
labs[i] = -1;
mat(j,i) = CMath::random(0,1) + 0.5;
mat(j, i) = dist(prng) + 0.5;
}
else
{
labs[i] = 1;
mat(j,i) = CMath::random(0,1) - 0.5;
mat(j, i) = dist(prng) - 0.5;
}
}
}
2 changes: 1 addition & 1 deletion benchmarks/sparse_test.cpp
@@ -115,7 +115,7 @@ int main(int argc, char** argv)
v.set_const(1.0);
Map<VectorXd> map_v(v.vector, v.vlen);
CTime time;
CMath::init_random(17);
set_global_seed(17);

SG_SPRINT("time\tshogun (s)\teigen3 (s)\n\n");
for (index_t t=0; t<times; ++t)
2 changes: 1 addition & 1 deletion examples/meta/generator/translate.py
@@ -483,7 +483,7 @@ def translateExpr(self, expr):
method = expr[key][0]["Identifier"]
argsList = None
try:
argsList = expr[key][2]
argsList = expr[key][1]
Contributor Author:

Hi @karlnapf, I already fixed the missing parameter for global function calls here. Thank you for your help :)

Member:

Well done!!! :)

except IndexError:
pass
translatedArgsList = self.translateArgumentList(argsList)
2 changes: 1 addition & 1 deletion examples/meta/src/clustering/gmm.sg
@@ -1,6 +1,6 @@
CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
2 changes: 1 addition & 1 deletion examples/meta/src/clustering/kmeans.sg
@@ -1,5 +1,5 @@
CSVFile f_feats_train("../../data/classifier_binary_2d_linear_features_train.dat")
Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
2 changes: 1 addition & 1 deletion examples/meta/src/converter/ica_fast.sg
@@ -1,6 +1,6 @@
CSVFile f_feats("../../data/ica_2_sources.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features(f_feats)
2 changes: 1 addition & 1 deletion examples/meta/src/converter/ica_ff_sep.sg
@@ -1,6 +1,6 @@
CSVFile f_feats("../../data/ica_2_sources.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features(f_feats)
2 changes: 1 addition & 1 deletion examples/meta/src/converter/ica_jade.sg
@@ -1,6 +1,6 @@
CSVFile f_feats("../../data/ica_2_sources.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features(f_feats)
2 changes: 1 addition & 1 deletion examples/meta/src/converter/ica_jedi_sep.sg
@@ -1,6 +1,6 @@
CSVFile f_feats("../../data/ica_2_sources.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features(f_feats)
2 changes: 1 addition & 1 deletion examples/meta/src/converter/ica_sobi.sg
@@ -1,6 +1,6 @@
CSVFile f_feats("../../data/ica_2_sources.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features(f_feats)
@@ -2,7 +2,7 @@ CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat
CSVFile f_feats_test("../../data/classifier_4class_2d_linear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_4class_2d_linear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_4class_2d_linear_labels_test.dat")
Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
4 changes: 3 additions & 1 deletion examples/meta/src/meta_api/calls.sg
@@ -1,9 +1,11 @@
# static call
Math:init_random(1)
# Math:init_random(1)

# global function call
get_global_io()

set_global_seed(1)

# member function call
GaussianKernel k()
k.set_width(1)
2 changes: 1 addition & 1 deletion examples/meta/src/multiclass_classifier/cartree.sg
@@ -2,7 +2,7 @@ CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat
CSVFile f_feats_test("../../data/classifier_4class_2d_linear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_4class_2d_linear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_4class_2d_linear_labels_test.dat")
Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
@@ -1,4 +1,4 @@
Math:init_random(1)
set_global_seed(1)

CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat")
CSVFile f_feats_test("../../data/classifier_4class_2d_linear_features_test.dat")
2 changes: 1 addition & 1 deletion examples/meta/src/multiclass_classifier/random_forest.sg
@@ -2,7 +2,7 @@ CSVFile f_feats_train("../../data/classifier_4class_2d_linear_features_train.dat
CSVFile f_feats_test("../../data/classifier_4class_2d_linear_features_test.dat")
CSVFile f_labels_train("../../data/classifier_4class_2d_linear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_4class_2d_linear_labels_test.dat")
Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
@@ -3,7 +3,7 @@ CSVFile f_feats_test("../../data/classifier_binary_2d_nonlinear_features_test.da
CSVFile f_labels_train("../../data/classifier_binary_2d_nonlinear_labels_train.dat")
CSVFile f_labels_test("../../data/classifier_binary_2d_nonlinear_labels_test.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
@@ -3,7 +3,7 @@ CSVFile f_feats_test("../../data/regression_1d_sinc_features_test_with_9d_noise.
CSVFile f_labels_train("../../data/regression_1d_sinc_labels_train.dat")
CSVFile f_labels_test("../../data/regression_1d_sinc_labels_test.dat")

Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
2 changes: 1 addition & 1 deletion examples/meta/src/regression/random_forest_regression.sg
@@ -2,7 +2,7 @@ CSVFile f_feats_train("../../data/regression_1d_linear_features_train.dat")
CSVFile f_feats_test("../../data/regression_1d_linear_features_test.dat")
CSVFile f_labels_train("../../data/regression_1d_linear_labels_train.dat")
CSVFile f_labels_test("../../data/regression_1d_linear_labels_test.dat")
Math:init_random(1)
set_global_seed(1)

#![create_features]
RealFeatures features_train(f_feats_train)
6 changes: 4 additions & 2 deletions examples/undocumented/libshogun/classifier_larank.cpp
@@ -27,13 +27,15 @@ void test()
SGMatrix<float64_t> matrix_test(num_class, num_vec);
CMulticlassLabels* labels=new CMulticlassLabels(num_vec);
CMulticlassLabels* labels_test=new CMulticlassLabels(num_vec);
auto prng = get_prng();
std::normal_distribution<float64_t> dist(0, 1);
for (index_t i=0; i<num_vec; ++i)
{
index_t label=i%num_class;
for (index_t j=0; j<num_feat; ++j)
{
matrix(j,i)=CMath::randn_double();
matrix_test(j,i)=CMath::randn_double();
matrix(j, i) = dist(prng);
matrix_test(j, i) = dist(prng);
labels->set_label(i, label);
labels_test->set_label(i, label);
}
9 changes: 5 additions & 4 deletions examples/undocumented/libshogun/classifier_latent_svm.cpp
@@ -110,7 +110,7 @@ static void read_dataset(char* fname, CLatentFeatures*& feats, CLatentLabels*& l
SG_REF(labels);

CBinaryLabels* ys = new CBinaryLabels(num_examples);

auto prng = get_prng();
feats = new CLatentFeatures(num_examples);
SG_REF(feats);

@@ -144,10 +144,11 @@ static void read_dataset(char* fname, CLatentFeatures*& feats, CLatentLabels*& l
while ((*pchar)!='\n') pchar++;
*pchar = '\0';
height = atoi(last_pchar);

std::uniform_int_distribution<index_t> dist_w(0, width - 1);
std::uniform_int_distribution<index_t> dist_h(0, height - 1);
/* create latent label */
int x = CMath::random(0, width-1);
int y = CMath::random(0, height-1);
int x = dist_w(prng);
int y = dist_h(prng);
CBoundingBox* bb = new CBoundingBox(x,y);
labels->add_latent_label(bb);

44 changes: 23 additions & 21 deletions examples/undocumented/libshogun/classifier_libsvm_probabilities.cpp
@@ -10,27 +10,29 @@ using namespace shogun;
//generates data points (of different classes) randomly
void gen_rand_data(SGMatrix<float64_t> features, SGVector<float64_t> labels, float64_t distance)
{
index_t num_samples=labels.vlen;
index_t dimensions=features.num_rows;
for (int32_t i=0; i<num_samples; i++)
{
if (i<num_samples/2)
{
labels[i]=-1.0;
for(int32_t j=0; j<dimensions; j++)
features(j,i)=CMath::random(0.0,1.0)+distance;
}
else
{
labels[i]=1.0;
for(int32_t j=0; j<dimensions; j++)
features(j,i)=CMath::random(0.0,1.0)-distance;
}
}
labels.display_vector("labels");
std::cout<<std::endl;
features.display_matrix("features");
std::cout<<std::endl;
auto prng = get_prng();
std::uniform_real_distribution<float64_t> dist(0, 1.0);
index_t num_samples = labels.vlen;
index_t dimensions = features.num_rows;
for (int32_t i = 0; i < num_samples; i++)
{
if (i < num_samples / 2)
{
labels[i] = -1.0;
for (int32_t j = 0; j < dimensions; j++)
features(j, i) = dist(prng) + distance;
}
else
{
labels[i] = 1.0;
for (int32_t j = 0; j < dimensions; j++)
features(j, i) = dist(prng) - distance;
}
}
labels.display_vector("labels");
std::cout << std::endl;
features.display_matrix("features");
std::cout << std::endl;
}

int main(int argc, char** argv)
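The rewritten gen_rand_data() above fills the first half of the columns with uniform noise shifted by +distance (label -1) and the second half shifted by -distance (label +1). A hypothetical call site, not shown in this diff, could look roughly like the following (container sizes and the distance value are illustrative, and the shogun headers providing SGMatrix, SGVector and set_global_seed() are assumed to be included):

    // Hypothetical usage sketch for gen_rand_data().
    void usage_sketch()
    {
        set_global_seed(17);                  // make the random data reproducible
        SGMatrix<float64_t> features(2, 10);  // 2 dimensions, 10 samples
        SGVector<float64_t> labels(10);
        gen_rand_data(features, labels, 0.5); // labels are ±1, features offset by ±0.5
    }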
@@ -68,8 +68,10 @@ void test()
/* create some data and labels */
SGMatrix<float64_t> matrix(dim_vectors, num_vectors);
CBinaryLabels* labels=new CBinaryLabels(num_vectors);
auto prng = get_prng();
std::normal_distribution<float64_t> dist(0, 1);
for (int32_t i=0; i<num_vectors*dim_vectors; i++)
matrix.matrix[i]=CMath::randn_double();
matrix.matrix[i] = dist(prng);

/* create num_feautres 2-dimensional vectors */
CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>();
@@ -30,24 +30,26 @@ void test_svmlight()
float64_t p_x=0.5; // probability for class A
float64_t mostly_prob=0.8;
CDenseLabels* labels=new CBinaryLabels(num_train+num_test);
CMath::init_random(17);
auto prng = get_prng();
std::uniform_real_distribution<float64_t> dist_real(0.0, 1.0);
std::uniform_int_distribution<index_t> dist_int(1, max_length);

SGStringList<char> data(num_train+num_test, max_length);
for (index_t i=0; i<num_train+num_test; ++i)
{
/* determine length */
index_t length=CMath::random(1, max_length);
index_t length = dist_int(prng);

/* allocate string */
data.strings[i]=SGString<char>(length);

/* fill with elements and set label */
if (p_x<CMath::random(0.0, 1.0))
if (p_x < dist_real(prng))
{
labels->set_label(i, 1);
for (index_t j=0; j<length; ++j)
{
char c=mostly_prob<CMath::random(0.0, 1.0) ? '0' : '1';
char c = mostly_prob < dist_real(prng) ? '0' : '1';
data.strings[i].string[j]=c;
}
}
Expand All @@ -56,7 +58,7 @@ void test_svmlight()
labels->set_label(i, -1);
for (index_t j=0; j<length; ++j)
{
char c=mostly_prob<CMath::random(0.0, 1.0) ? '1' : '0';
char c = mostly_prob < dist_real(prng) ? '1' : '0';
data.strings[i].string[j]=c;
}
}
5 changes: 4 additions & 1 deletion examples/undocumented/libshogun/clustering_kmeans.cpp
@@ -39,6 +39,7 @@ int main(int argc, char **argv)
int32_t dim_features=3;
int32_t num_vectors_per_cluster=5;
float64_t cluster_std_dev=2.0;
auto prng = get_prng();

/* build random cluster centers */
SGMatrix<float64_t> cluster_centers(dim_features, num_clusters);
@@ -59,7 +60,9 @@
idx+=j;
idx+=k*dim_features;
float64_t entry=cluster_centers.matrix[i*dim_features+j];
data.matrix[idx]=CMath::normal_random(entry, cluster_std_dev);
std::normal_distribution<float64_t> dist(
entry, cluster_std_dev);
data.matrix[idx] = dist(prng);
}
}
}
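The inner loop above constructs a new std::normal_distribution for every matrix entry because the mean changes per entry. An equivalent alternative, sketched here as a hypothetical helper and assuming dist(prng) is simply meant to sample N(entry, cluster_std_dev), keeps a single standard-normal distribution and shifts/scales its draws:

    // Hypothetical alternative: sample N(mean, stddev) by scaling one
    // standard-normal draw instead of constructing a distribution per entry.
    #include <random>

    template <typename PRNG>
    double normal_sample(PRNG& prng, double mean, double stddev)
    {
        static thread_local std::normal_distribution<double> std_normal(0.0, 1.0);
        return mean + stddev * std_normal(prng);
    }

    // e.g. data.matrix[idx] = normal_sample(prng, entry, cluster_std_dev);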