Skip to content

Commit

Permalink
restore some changed seeds
Browse files Browse the repository at this point in the history
  • Loading branch information
MikeLing committed Jul 19, 2017
1 parent 2d3324e commit fc97d5d
Show file tree
Hide file tree
Showing 12 changed files with 19 additions and 19 deletions.
2 changes: 1 addition & 1 deletion src/shogun/base/DynArray.h
Expand Up @@ -448,7 +448,7 @@ template <class T> class DynArray
/** randomizes the array (not thread safe!) */
void shuffle()
{
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());
for (index_t i=0; i<=current_num_elements-1; ++i)
CMath::swap(
array[i],
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/clustering/KMeansMiniBatch.cpp
Expand Up @@ -131,7 +131,7 @@ SGVector<int32_t> CKMeansMiniBatch::mbchoose_rand(int32_t b, int32_t num)
{
SGVector<int32_t> chosen=SGVector<int32_t>(num);
SGVector<int32_t> ret=SGVector<int32_t>(b);
auto rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto rng = std::unique_ptr<CRandom>(new CRandom());
chosen.zero();
int32_t ch=0;
while (ch<b)
Expand Down
6 changes: 3 additions & 3 deletions src/shogun/features/DataGenerator.cpp
Expand Up @@ -33,7 +33,7 @@ SGMatrix<float64_t> CDataGenerator::generate_checkboard_data(int32_t num_classes
int32_t dim, int32_t num_points, float64_t overlap)
{
int32_t points_per_class = num_points / num_classes;
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());

int32_t grid_size = (int32_t ) CMath::ceil(CMath::sqrt((float64_t ) num_classes));
float64_t cell_size = (float64_t ) 1 / grid_size;
Expand Down Expand Up @@ -88,7 +88,7 @@ SGMatrix<float64_t> CDataGenerator::generate_mean_data(index_t m,
/* evtl. allocate space */
SGMatrix<float64_t> result=SGMatrix<float64_t>::get_allocated_matrix(
dim, 2*m, target);
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());

/* fill matrix with normal data */
for (index_t i=0; i<2*m; ++i)
Expand All @@ -110,7 +110,7 @@ SGMatrix<float64_t> CDataGenerator::generate_sym_mix_gauss(index_t m,
/* evtl. allocate space */
SGMatrix<float64_t> result=SGMatrix<float64_t>::get_allocated_matrix(
2, m, target);
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());
/* rotation matrix */
SGMatrix<float64_t> rot=SGMatrix<float64_t>(2,2);
rot(0, 0)=CMath::cos(angle);
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/lib/SGVector.cpp
Expand Up @@ -614,7 +614,7 @@ void SGVector<float32_t>::vec1_plus_scalar_times_vec2(float32_t* vec1,
template <class T>
void SGVector<T>::random_vector(T* vec, int32_t len, T min_value, T max_value)
{
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());
for (int32_t i=0; i<len; i++)
vec[i] = m_rng->random(min_value, max_value);
}
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/mathematics/Math.h
Expand Up @@ -1027,7 +1027,7 @@ class CMath : public CSGObject
else
{
auto m_rng =
std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
std::unique_ptr<CRandom>(new CRandom());
for (index_t i = 0; i < v.vlen; ++i)
swap(v[i], v[m_rng->random(i, v.vlen - 1)]);
}
Expand Down
6 changes: 3 additions & 3 deletions src/shogun/mathematics/Statistics.cpp
Expand Up @@ -325,7 +325,7 @@ SGVector<int32_t> CStatistics::sample_indices(int32_t sample_size, int32_t N)
int32_t* idxs=SG_MALLOC(int32_t,N);
int32_t i, rnd;
int32_t* permuted_idxs=SG_MALLOC(int32_t,sample_size);
auto rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto rng = std::unique_ptr<CRandom>(new CRandom());

// reservoir sampling
for (i=0; i<N; i++)
Expand Down Expand Up @@ -712,7 +712,7 @@ SGMatrix<float64_t> CStatistics::sample_from_gaussian(SGVector<float64_t> mean,
int32_t dim=mean.vlen;
Map<VectorXd> mu(mean.vector, mean.vlen);
Map<MatrixXd> c(cov.matrix, cov.num_rows, cov.num_cols);
auto rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto rng = std::unique_ptr<CRandom>(new CRandom());

// generate samples, z, from N(0, I), DxN
SGMatrix<float64_t> S(dim, N);
Expand Down Expand Up @@ -775,7 +775,7 @@ SGMatrix<float64_t> CStatistics::sample_from_gaussian(SGVector<float64_t> mean,

typedef SparseMatrix<float64_t> MatrixType;
const MatrixType &c=EigenSparseUtil<float64_t>::toEigenSparse(cov);
auto rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto rng = std::unique_ptr<CRandom>(new CRandom());

SimplicialLLT<MatrixType> llt;

Expand Down
2 changes: 1 addition & 1 deletion src/shogun/mathematics/ajd/QDiag.cpp
Expand Up @@ -16,7 +16,7 @@ SGMatrix<float64_t> CQDiag::diagonalize(SGNDArray<float64_t> C, SGMatrix<float64
int T = C.dims[2];

SGMatrix<float64_t> V;
auto rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto rng = std::unique_ptr<CRandom>(new CRandom());
if (V0.num_rows == N && V0.num_cols == N)
{
V = V0.clone();
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/multiclass/LaRank.h
Expand Up @@ -250,7 +250,7 @@ namespace shogun
LaRankPattern & sample ()
{
auto m_rng =
std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
std::unique_ptr<CRandom>(new CRandom());
ASSERT(!empty())
while (true)
{
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/optimization/liblinear/shogun_liblinear.cpp
Expand Up @@ -512,7 +512,7 @@ void Solver_MCSVM_CS::solve()
}
state->inited = true;
}
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());
while(iter < max_iter && !CSignal::cancel_computations())
{
double stopping = -CMath::INFTY;
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/structure/TwoStateModel.cpp
Expand Up @@ -269,7 +269,7 @@ CHMSVMModel* CTwoStateModel::simulate_data(int32_t num_exm, int32_t exm_len,
SGVector< int32_t > ll(num_exm*exm_len);
ll.zero();
int32_t rnb, rl, rp;
auto m_rng = std::unique_ptr<CRandom>(new CRandom(sg_random_seed));
auto m_rng = std::unique_ptr<CRandom>(new CRandom());
for ( int32_t i = 0 ; i < num_exm ; ++i)
{
SGVector< int32_t > lab(exm_len);
Expand Down
8 changes: 4 additions & 4 deletions tests/unit/neuralnets/NeuralNetwork_unittest.cc
Expand Up @@ -56,7 +56,7 @@ TEST(NeuralNetwork, backpropagation_linear)
{
float64_t tolerance = 1e-9;

set_global_seed(100);
set_global_seed(10);

CDynamicObjectArray* layers = new CDynamicObjectArray();
layers->append_element(new CNeuralInputLayer(5));
Expand Down Expand Up @@ -88,7 +88,7 @@ TEST(NeuralNetwork, neural_layers_builder)
{
float64_t tolerance = 1e-9;

set_global_seed(100);
set_global_seed(10);

CNeuralLayers* layers = new CNeuralLayers();
layers->input(5)
Expand Down Expand Up @@ -123,7 +123,7 @@ TEST(NeuralNetwork, backpropagation_logistic)
{
float64_t tolerance = 1e-9;

set_global_seed(100);
set_global_seed(10);

CDynamicObjectArray* layers = new CDynamicObjectArray();
layers->append_element(new CNeuralInputLayer(5));
Expand Down Expand Up @@ -155,7 +155,7 @@ TEST(NeuralNetwork, backpropagation_softmax)
{
float64_t tolerance = 1e-9;

set_global_seed(100);
set_global_seed(10);

CDynamicObjectArray* layers = new CDynamicObjectArray();
layers->append_element(new CNeuralInputLayer(5));
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/neuralnets/RBM_unittest.cc
Expand Up @@ -84,7 +84,7 @@ TEST(RBM, gibbs_sampling)

TEST(RBM, free_energy_binary)
{
set_global_seed(10);
set_global_seed(100);

int32_t num_visible = 5;
int32_t num_hidden = 6;
Expand Down

0 comments on commit fc97d5d

Please sign in to comment.