
Commit

goes from C-style loop variable to C++-style for trainAuto
r2d3 committed Aug 8, 2017
1 parent 8762947 commit ec05a87
Showing 1 changed file with 15 additions and 19 deletions.
34 changes: 15 additions & 19 deletions modules/ml/src/svm.cpp
@@ -1671,19 +1671,17 @@ class SVMImpl : public SVM
Mat temp_train_responses(train_sample_count, 1, rtype);
Mat temp_test_responses;

- int i,j,k,p;
-
- for( p = range.start; p < range.end; p++ )
+ for( int p = range.start; p < range.end; p++ )
{
svm->setParams(parameters[p]);

double error = 0;
- for( k = 0; k < k_fold; k++ )
+ for( int k = 0; k < k_fold; k++ )
{
int start = (k*sample_count + k_fold/2)/k_fold;
- for( i = 0; i < train_sample_count; i++ )
+ for( int i = 0; i < train_sample_count; i++ )
{
- j = sidx[(i+start)%sample_count];
+ int j = sidx[(i+start)%sample_count];
memcpy(temp_train_samples.ptr(i), samples.ptr(j), sample_size);
if( is_classification )
temp_train_responses.at<int>(i) = responses.at<int>(j);
@@ -1695,17 +1693,17 @@ class SVMImpl : public SVM
if( !svm->do_train( temp_train_samples, temp_train_responses ))
continue;

- for( i = 0; i < test_sample_count; i++ )
+ for( int i = 0; i < test_sample_count; i++ )
{
- j = sidx[(i+start+train_sample_count) % sample_count];
+ int j = sidx[(i+start+train_sample_count) % sample_count];
memcpy(temp_test_samples.ptr(i), samples.ptr(j), sample_size);
}

svm->predict(temp_test_samples, temp_test_responses, 0);
- for( i = 0; i < test_sample_count; i++ )
+ for( int i = 0; i < test_sample_count; i++ )
{
float val = temp_test_responses.at<float>(i);
- j = sidx[(i+start+train_sample_count) % sample_count];
+ int j = sidx[(i+start+train_sample_count) % sample_count];
if( is_classification )
error += (float)(val != responses.at<int>(j));
else
@@ -1812,10 +1810,8 @@ class SVMImpl : public SVM
vector<int> sidx;
setRangeVector(sidx, sample_count);

- int i, k;
-
// randomly permute training samples
- for( i = 0; i < sample_count; i++ )
+ for( int i = 0; i < sample_count; i++ )
{
int i1 = rng.uniform(0, sample_count);
int i2 = rng.uniform(0, sample_count);
@@ -1829,7 +1825,7 @@ class SVMImpl : public SVM
// between the k_fold parts.
vector<int> sidx0, sidx1;

- for( i = 0; i < sample_count; i++ )
+ for( int i = 0; i < sample_count; i++ )
{
if( responses.at<int>(sidx[i]) == 0 )
sidx0.push_back(sidx[i]);
@@ -1840,15 +1836,15 @@ class SVMImpl : public SVM
int n0 = (int)sidx0.size(), n1 = (int)sidx1.size();
int a0 = 0, a1 = 0;
sidx.clear();
- for( k = 0; k < k_fold; k++ )
+ for( int k = 0; k < k_fold; k++ )
{
int b0 = ((k+1)*n0 + k_fold/2)/k_fold, b1 = ((k+1)*n1 + k_fold/2)/k_fold;
int a = (int)sidx.size(), b = a + (b0 - a0) + (b1 - a1);
- for( i = a0; i < b0; i++ )
+ for( int i = a0; i < b0; i++ )
sidx.push_back(sidx0[i]);
- for( i = a1; i < b1; i++ )
+ for( int i = a1; i < b1; i++ )
sidx.push_back(sidx1[i]);
- for( i = 0; i < (b - a); i++ )
+ for( int i = 0; i < (b - a); i++ )
{
int i1 = rng.uniform(a, b);
int i2 = rng.uniform(a, b);
@@ -1882,7 +1878,7 @@ class SVMImpl : public SVM
// Extract the best parameters
SvmParams best_params = params;
double min_error = FLT_MAX;
- for( i = 0; i < (int)result.size(); i++ )
+ for( int i = 0; i < (int)result.size(); i++ )
{
if( result[i] < min_error )
{
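The entire patch is one mechanical pattern: counters that used to be declared once near the top of the function are now declared in each for-init statement, so every loop owns its own index and nothing leaks between loops. A minimal standalone sketch of the two styles (illustrative only, not code taken from svm.cpp):

#include <cstdio>

int main()
{
    // C-style: the counter lives in the enclosing scope, survives the loop,
    // and is silently shared by every later loop that reuses the name.
    int i;
    for( i = 0; i < 3; i++ )
        printf("C-style   i = %d\n", i);

    // C++-style: the counter is scoped to the loop itself, so it cannot leak
    // into, or be clobbered by, unrelated code further down the function.
    for( int i = 0; i < 3; i++ )
        printf("C++-style i = %d\n", i);

    return 0;
}

Iteration order and results are unchanged by the commit; only the declarations move.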
