Skip to content

Commit

Permalink
Modernize code (clang-tidy check modernize-loop-convert)
Browse files Browse the repository at this point in the history
Signed-off-by: Stefan Weil <sw@weilnetz.de>
  • Loading branch information
stweil committed Apr 5, 2019
1 parent fab9a54 commit 20d5eed
Show file tree
Hide file tree
Showing 16 changed files with 84 additions and 96 deletions.
22 changes: 7 additions & 15 deletions src/api/hocrrenderer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -293,13 +293,11 @@ char* TessBaseAPI::GetHOCRText(ETEXT_DESC* monitor, int page_number) {
if (bold) hocr_str << "</strong>";
// If the lstm choice mode is required it is added here
if (tesseract_->lstm_choice_mode == 1 && rawTimestepMap != nullptr) {
for (size_t i = 0; i < rawTimestepMap->size(); i++) {
for (auto timestep : *rawTimestepMap) {
hocr_str << "\n <span class='ocrx_cinfo'"
<< " id='"
<< "timestep_" << page_id << "_" << wcnt << "_" << tcnt << "'"
<< ">";
std::vector<std::pair<const char*, float>> timestep =
(*rawTimestepMap)[i];
for (std::pair<const char*, float> conf : timestep) {
hocr_str << "<span class='ocr_glyph'"
<< " id='"
Expand All @@ -312,43 +310,37 @@ char* TessBaseAPI::GetHOCRText(ETEXT_DESC* monitor, int page_number) {
tcnt++;
}
} else if (tesseract_->lstm_choice_mode == 2 && choiceMap != nullptr) {
for (size_t i = 0; i < choiceMap->size(); i++) {
std::vector<std::pair<const char*, float>> timestep =
(*choiceMap)[i];
for (auto timestep : *choiceMap) {
if (timestep.size() > 0) {
hocr_str << "\n <span class='ocrx_cinfo'"
<< " id='"
<< "lstm_choices_" << page_id << "_" << wcnt << "_" << tcnt
<< "'>";
for (size_t j = 0; j < timestep.size(); j++) {
for (auto & j : timestep) {
hocr_str << "<span class='ocr_glyph'"
<< " id='"
<< "choice_" << page_id << "_" << wcnt << "_" << gcnt
<< "'"
<< " title='x_confs " << int(timestep[j].second * 100)
<< "'>" << timestep[j].first << "</span>";
<< " title='x_confs " << int(j.second * 100)
<< "'>" << j.first << "</span>";
gcnt++;
}
hocr_str << "</span>";
tcnt++;
}
}
} else if (tesseract_->lstm_choice_mode == 3 && symbolMap != nullptr) {
for (size_t j = 0; j < symbolMap->size(); j++) {
std::vector<std::vector<std::pair<const char*, float>>> timesteps =
(*symbolMap)[j];
for (auto timesteps : *symbolMap) {
hocr_str << "\n <span class='ocr_symbol'"
<< " id='"
<< "symbol_" << page_id << "_" << wcnt << "_" << scnt
<< "'>";
for (size_t i = 0; i < timesteps.size(); i++) {
for (auto timestep : timesteps) {
hocr_str << "\n <span class='ocrx_cinfo'"
<< " id='"
<< "timestep_" << page_id << "_" << wcnt << "_" << tcnt
<< "'"
<< ">";
std::vector<std::pair<const char*, float>> timestep =
timesteps[i];
for (std::pair<const char*, float> conf : timestep) {
hocr_str << "<span class='ocr_glyph'"
<< " id='"
Expand Down
16 changes: 8 additions & 8 deletions src/ccutil/tessdatamanager.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -183,17 +183,17 @@ void TessdataManager::Serialize(GenericVector<char> *data) const {
fp.OpenWrite(data);
fp.Serialize(&num_entries);
fp.Serialize(&offset_table[0], countof(offset_table));
for (unsigned i = 0; i < TESSDATA_NUM_ENTRIES; ++i) {
if (!entries_[i].empty()) {
fp.Serialize(&entries_[i][0], entries_[i].size());
for (const auto& entry : entries_) {
if (!entry.empty()) {
fp.Serialize(&entry[0], entry.size());
}
}
}

// Resets to the initial state, keeping the reader.
void TessdataManager::Clear() {
  // Empty every component entry; the entries_ array itself is retained.
  for (auto& entry : entries_) {
    entry.clear();
  }
  // Nothing is loaded any more.
  is_loaded_ = false;
}
Expand Down Expand Up @@ -245,11 +245,11 @@ bool TessdataManager::CombineDataFiles(
const char *language_data_path_prefix,
const char *output_filename) {
// Load individual tessdata components from files.
for (unsigned i = 0; i < TESSDATA_NUM_ENTRIES; ++i) {
for (auto filesuffix : kTessdataFileSuffixes) {
TessdataType type;
ASSERT_HOST(TessdataTypeFromFileSuffix(kTessdataFileSuffixes[i], &type));
ASSERT_HOST(TessdataTypeFromFileSuffix(filesuffix, &type));
STRING filename = language_data_path_prefix;
filename += kTessdataFileSuffixes[i];
filename += filesuffix;
FILE *fp = fopen(filename.string(), "rb");
if (fp != nullptr) {
fclose(fp);
Expand Down
10 changes: 5 additions & 5 deletions src/classify/cluster.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -405,8 +405,8 @@ MakeClusterer (int16_t SampleSize, const PARAM_DESC ParamDesc[]) {

// Initialize cache of histogram buckets to minimize recomputing them.
for (auto & d : Clusterer->bucket_cache) {
for (int c = 0; c < MAXBUCKETS + 1 - MINBUCKETS; ++c)
d[c] = nullptr;
for (auto & c : d)
c = nullptr;
}

return Clusterer;
Expand Down Expand Up @@ -520,9 +520,9 @@ void FreeClusterer(CLUSTERER *Clusterer) {
FreeCluster (Clusterer->Root);
// Free up all used buckets structures.
for (auto & d : Clusterer->bucket_cache) {
for (int c = 0; c < MAXBUCKETS + 1 - MINBUCKETS; ++c)
if (d[c] != nullptr)
FreeBuckets(d[c]);
for (auto & c : d)
if (c != nullptr)
FreeBuckets(c);
}

free(Clusterer);
Expand Down
12 changes: 6 additions & 6 deletions src/lstm/lstm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ void LSTM::Forward(bool debug, const NetworkIO& input,
ResizeForward(input);
// Temporary storage of forward computation for each gate.
NetworkScratch::FloatVec temp_lines[WT_COUNT];
for (int i = 0; i < WT_COUNT; ++i) temp_lines[i].Init(ns_, scratch);
for (auto & temp_line : temp_lines) temp_line.Init(ns_, scratch);
// Single timestep buffers for the current/recurrent output and state.
NetworkScratch::FloatVec curr_state, curr_output;
curr_state.Init(ns_, scratch);
Expand Down Expand Up @@ -455,7 +455,7 @@ bool LSTM::Backward(bool debug, const NetworkIO& fwd_deltas,
ZeroVector<double>(na_, curr_sourceerr);
// Errors in the gates.
NetworkScratch::FloatVec gate_errors[WT_COUNT];
for (int g = 0; g < WT_COUNT; ++g) gate_errors[g].Init(ns_, scratch);
for (auto & gate_error : gate_errors) gate_error.Init(ns_, scratch);
// Rotating buffers of width buf_width allow storage of the recurrent time-
// steps used only for true 2-D. Stores one full strip of the major direction.
int buf_width = Is2D() ? input_map_.Size(FD_WIDTH) : 1;
Expand All @@ -472,13 +472,13 @@ bool LSTM::Backward(bool debug, const NetworkIO& fwd_deltas,
}
// Parallel-generated sourceerr from each of the gates.
NetworkScratch::FloatVec sourceerr_temps[WT_COUNT];
for (int w = 0; w < WT_COUNT; ++w)
sourceerr_temps[w].Init(na_, scratch);
for (auto & sourceerr_temp : sourceerr_temps)
sourceerr_temp.Init(na_, scratch);
int width = input_width_;
// Transposed gate errors stored over all timesteps for sum outer.
NetworkScratch::GradientStore gate_errors_t[WT_COUNT];
for (int w = 0; w < WT_COUNT; ++w) {
gate_errors_t[w].Init(ns_, width, scratch);
for (auto & w : gate_errors_t) {
w.Init(ns_, width, scratch);
}
// Used only if softmax_ != nullptr.
NetworkScratch::FloatVec softmax_errors;
Expand Down
8 changes: 4 additions & 4 deletions src/lstm/lstmtrainer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -435,8 +435,8 @@ bool LSTMTrainer::Serialize(SerializeAmount serialize_amount,
if (!fp->Serialize(&prev_sample_iteration_)) return false;
if (!fp->Serialize(&perfect_delay_)) return false;
if (!fp->Serialize(&last_perfect_training_iteration_)) return false;
for (int i = 0; i < ET_COUNT; ++i) {
if (!error_buffers_[i].Serialize(fp)) return false;
for (const auto & error_buffer : error_buffers_) {
if (!error_buffer.Serialize(fp)) return false;
}
if (!fp->Serialize(&error_rates_[0], countof(error_rates_))) return false;
if (!fp->Serialize(&training_stage_)) return false;
Expand Down Expand Up @@ -479,8 +479,8 @@ bool LSTMTrainer::DeSerialize(const TessdataManager* mgr, TFile* fp) {
if (!fp->DeSerialize(&prev_sample_iteration_)) return false;
if (!fp->DeSerialize(&perfect_delay_)) return false;
if (!fp->DeSerialize(&last_perfect_training_iteration_)) return false;
for (int i = 0; i < ET_COUNT; ++i) {
if (!error_buffers_[i].DeSerialize(fp)) return false;
for (auto & error_buffer : error_buffers_) {
if (!error_buffer.DeSerialize(fp)) return false;
}
if (!fp->DeSerialize(&error_rates_[0], countof(error_rates_))) return false;
if (!fp->DeSerialize(&training_stage_)) return false;
Expand Down
15 changes: 7 additions & 8 deletions src/lstm/recodebeam.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -262,22 +262,21 @@ void RecodeBeamSearch::ExtractBestPathAsWords(const TBOX& line_box,
if ((best_choices_acc.size() > 0 && i == std::get<1>(best_choices_acc.front()) - 1)
|| i == xcoords[word_end]-1) {
std::map<const char*, float> summed_propabilities;
for (auto it = choice_pairs.begin(); it != choice_pairs.end(); ++it) {
summed_propabilities[it->first] += it->second;
for (auto & choice_pair : choice_pairs) {
summed_propabilities[choice_pair.first] += choice_pair.second;
}
std::vector<std::pair<const char*, float>> accumulated_timestep;
for (auto it = summed_propabilities.begin();
it != summed_propabilities.end(); ++it) {
for (auto& summed_propability : summed_propabilities) {
if(sum == 0) break;
it->second/=sum;
summed_propability.second/=sum;
size_t pos = 0;
while (accumulated_timestep.size() > pos
&& accumulated_timestep[pos].second > it->second) {
&& accumulated_timestep[pos].second > summed_propability.second) {
pos++;
}
accumulated_timestep.insert(accumulated_timestep.begin() + pos,
std::pair<const char*,float>(it->first,
it->second));
std::pair<const char*,float>(summed_propability.first,
summed_propability.second));
}
if (best_choices_acc.size() > 0) {
best_choices_acc.pop_front();
Expand Down
8 changes: 4 additions & 4 deletions src/lstm/recodebeam.h
Original file line number Diff line number Diff line change
Expand Up @@ -245,12 +245,12 @@ class RecodeBeamSearch {
struct RecodeBeam {
// Resets to the initial state without deleting all the memory.
void Clear() {
for (int i = 0; i < kNumBeams; ++i) {
beams_[i].clear();
for (auto & beam : beams_) {
beam.clear();
}
RecodeNode empty;
for (int i = 0; i < NC_COUNT; ++i) {
best_initial_dawgs_[i] = empty;
for (auto & best_initial_dawg : best_initial_dawgs_) {
best_initial_dawg = empty;
}
}

Expand Down
5 changes: 2 additions & 3 deletions src/lstm/stridemap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
// File: stridemap.cpp
// Description: Indexing into a 4-d tensor held in a 2-d Array.
// Author: Ray Smith
// Created: Fri Sep 20 15:30:31 PST 2016
//
// (C) Copyright 2016, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
Expand All @@ -24,8 +23,8 @@ namespace tesseract {
// Returns true if *this is a valid index.
bool StrideMap::Index::IsValid() const {
// Cheap check first.
for (int d = 0; d < FD_DIMSIZE; ++d) {
if (indices_[d] < 0) return false;
for (int index : indices_) {
if (index < 0) return false;
}
for (int d = 0; d < FD_DIMSIZE; ++d) {
if (indices_[d] > MaxIndexOfDim(static_cast<FlexDimensions>(d)))
Expand Down
42 changes: 21 additions & 21 deletions src/textord/cjkpitch.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -900,7 +900,7 @@ class FPAnalyzer {
// Trivial destructor: FPAnalyzer owns no resources beyond its members.
~FPAnalyzer() = default;

// Runs the first analysis pass on every row.
void Pass1Analyze() {
  for (auto& row : rows_) row.Pass1Analyze();
}

// Estimate character pitch for each row. The argument pass1 can be
Expand All @@ -915,25 +915,25 @@ class FPAnalyzer {
}

// Merges character fragments in every row.
void MergeFragments() {
  for (auto& row : rows_) row.MergeFragments();
}

// Finalizes large-character handling in every row.
void FinalizeLargeChars() {
  for (auto& row : rows_) row.FinalizeLargeChars();
}

bool Pass2Analyze() {
bool changed = false;
for (size_t i = 0; i < rows_.size(); i++) {
if (rows_[i].Pass2Analyze()) {
for (auto & row : rows_) {
if (row.Pass2Analyze()) {
changed = true;
}
}
return changed;
}

// Writes out the pitch estimations of every row.
void OutputEstimations() {
  for (auto& row : rows_) row.OutputEstimations();
  // Don't we need page-level estimation of gaps/spaces?
}

Expand Down Expand Up @@ -1001,36 +1001,36 @@ void FPAnalyzer::EstimatePitch(bool pass1) {
num_tall_rows_ = 0;
num_bad_rows_ = 0;
pitch_height_stats.Clear();
for (size_t i = 0; i < rows_.size(); i++) {
rows_[i].EstimatePitch(pass1);
if (rows_[i].good_pitches()) {
pitch_height_stats.Add(rows_[i].height() + rows_[i].gap(),
rows_[i].pitch(), rows_[i].good_pitches());
if (rows_[i].height_pitch_ratio() > 1.1) num_tall_rows_++;
for (auto & row : rows_) {
row.EstimatePitch(pass1);
if (row.good_pitches()) {
pitch_height_stats.Add(row.height() + row.gap(),
row.pitch(), row.good_pitches());
if (row.height_pitch_ratio() > 1.1) num_tall_rows_++;
} else {
num_bad_rows_++;
}
}

pitch_height_stats.Finish();
for (size_t i = 0; i < rows_.size(); i++) {
if (rows_[i].good_pitches() >= 5) {
for (auto & row : rows_) {
if (row.good_pitches() >= 5) {
// We have enough evidences. Just use the pitch estimation
// from this row.
rows_[i].set_estimated_pitch(rows_[i].pitch());
} else if (rows_[i].num_chars() > 1) {
row.set_estimated_pitch(row.pitch());
} else if (row.num_chars() > 1) {
float estimated_pitch =
pitch_height_stats.EstimateYFor(rows_[i].height() + rows_[i].gap(),
pitch_height_stats.EstimateYFor(row.height() + row.gap(),
0.1);
// CJK characters are more likely to be fragmented than poorly
// chopped. So trust the page-level estimation of character
// pitch only if it's larger than row-level estimation or
// row-level estimation is too large (2x bigger than row height).
if (estimated_pitch > rows_[i].pitch() ||
rows_[i].pitch() > rows_[i].height() * 2.0) {
rows_[i].set_estimated_pitch(estimated_pitch);
if (estimated_pitch > row.pitch() ||
row.pitch() > row.height() * 2.0) {
row.set_estimated_pitch(estimated_pitch);
} else {
rows_[i].set_estimated_pitch(rows_[i].pitch());
row.set_estimated_pitch(row.pitch());
}
}
}
Expand Down
5 changes: 2 additions & 3 deletions src/textord/colpartition.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
// Description: Class to hold partitions of the page that correspond
// roughly to text lines.
// Author: Ray Smith
// Created: Thu Aug 14 10:54:01 PDT 2008
//
// (C) Copyright 2008, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
Expand Down Expand Up @@ -602,8 +601,8 @@ void ColPartition::ComputeSpecialBlobsDensity() {
special_blobs_densities_[type]++;
}

for (int type = 0; type < BSTT_COUNT; ++type) {
special_blobs_densities_[type] /= boxes_.length();
for (float& special_blobs_density : special_blobs_densities_) {
special_blobs_density /= boxes_.length();
}
}

Expand Down
10 changes: 5 additions & 5 deletions src/textord/strokewidth.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -714,8 +714,8 @@ void StrokeWidth::AccumulateOverlaps(const BLOBNBOX* not_this, bool debug,
// the search is over, and at this point the final bbox must not overlap
// any of the nearests.
BLOBNBOX* nearests[BND_COUNT];
for (int i = 0; i < BND_COUNT; ++i) {
nearests[i] = nullptr;
for (auto & nearest : nearests) {
nearest = nullptr;
}
int x = (bbox->left() + bbox->right()) / 2;
int y = (bbox->bottom() + bbox->top()) / 2;
Expand Down Expand Up @@ -774,9 +774,9 @@ void StrokeWidth::AccumulateOverlaps(const BLOBNBOX* not_this, bool debug,
break;
}
// Final overlap with a nearest is not allowed.
for (int dir = 0; dir < BND_COUNT; ++dir) {
if (nearests[dir] == nullptr) continue;
const TBOX& nbox = nearests[dir]->bounding_box();
for (auto & nearest : nearests) {
if (nearest == nullptr) continue;
const TBOX& nbox = nearest->bounding_box();
if (debug) {
tprintf("Testing for overlap with:");
nbox.print();
Expand Down
Loading

0 comments on commit 20d5eed

Please sign in to comment.