Skip to content

Commit

Permalink
Fix CID 1164703 (Untrusted value as argument)
Browse files Browse the repository at this point in the history
Wrong file data could give a large value for the number of vector elements,
resulting in very large memory allocations.

Limit the allowed data range to UINT16_MAX (65535) elements
which hopefully should be sufficient for all use cases.

Changing the data type of the related member variables from int to
uint32_t allowed removing several type casts.

Signed-off-by: Stefan Weil <sw@weilnetz.de>
  • Loading branch information
stweil committed Jul 6, 2018
1 parent a078ce0 commit 92e2ad0
Show file tree
Hide file tree
Showing 7 changed files with 30 additions and 30 deletions.
6 changes: 4 additions & 2 deletions src/ccutil/genericvector.h
Expand Up @@ -564,12 +564,14 @@ class PointerVector : public GenericVector<T*> {
// Also needs T::T(), as new T is used in this function.
// Returns false in case of error.
bool DeSerialize(bool swap, FILE* fp) {
int32_t reserved;
uint32_t reserved;
if (fread(&reserved, sizeof(reserved), 1, fp) != 1) return false;
if (swap) Reverse32(&reserved);
// Arbitrarily limit the number of elements to protect against bad data.
if (reserved > UINT16_MAX) return false;
GenericVector<T*>::reserve(reserved);
truncate(0);
for (int i = 0; i < reserved; ++i) {
for (uint32_t i = 0; i < reserved; ++i) {
int8_t non_null;
if (fread(&non_null, sizeof(non_null), 1, fp) != 1) return false;
T* item = nullptr;
Expand Down
2 changes: 1 addition & 1 deletion src/classify/adaptmatch.cpp
Expand Up @@ -1339,7 +1339,7 @@ int Classify::CharNormTrainingSample(bool pruner_only,
ADAPT_RESULTS* adapt_results = new ADAPT_RESULTS();
adapt_results->Initialize();
// Compute the bounding box of the features.
int num_features = sample.num_features();
uint32_t num_features = sample.num_features();
// Only the top and bottom of the blob_box are used by MasterMatcher, so
// fabricate right and left using top and bottom.
TBOX blob_box(sample.geo_feature(GeoBottom), sample.geo_feature(GeoBottom),
Expand Down
8 changes: 3 additions & 5 deletions src/classify/mastertrainer.cpp
@@ -1,5 +1,3 @@
// Copyright 2010 Google Inc. All Rights Reserved.
// Author: rays@google.com (Ray Smith)
///////////////////////////////////////////////////////////////////////
// File: mastertrainer.cpp
// Description: Trainer to build the MasterClassifier.
Expand Down Expand Up @@ -552,8 +550,8 @@ CLUSTERER* MasterTrainer::SetupForClustering(
int sample_id = 0;
for (int i = sample_ptrs.size() - 1; i >= 0; --i) {
const TrainingSample* sample = sample_ptrs[i];
int num_features = sample->num_micro_features();
for (int f = 0; f < num_features; ++f)
uint32_t num_features = sample->num_micro_features();
for (uint32_t f = 0; f < num_features; ++f)
MakeSample(clusterer, sample->micro_features()[f], sample_id);
++sample_id;
}
Expand Down Expand Up @@ -706,7 +704,7 @@ void MasterTrainer::DisplaySamples(const char* unichar_str1, int cloud_font,
if (class_id2 != INVALID_UNICHAR_ID && canonical_font >= 0) {
const TrainingSample* sample = samples_.GetCanonicalSample(canonical_font,
class_id2);
for (int f = 0; f < sample->num_features(); ++f) {
for (uint32_t f = 0; f < sample->num_features(); ++f) {
RenderIntFeature(f_window, &sample->features()[f], ScrollView::RED);
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/classify/picofeat.cpp
Expand Up @@ -224,10 +224,10 @@ FEATURE_SET Classify::ExtractIntCNFeatures(
blob, false, &local_fx_info, &bl_features);
if (sample == nullptr) return nullptr;

int num_features = sample->num_features();
uint32_t num_features = sample->num_features();
const INT_FEATURE_STRUCT* features = sample->features();
FEATURE_SET feature_set = NewFeatureSet(num_features);
for (int f = 0; f < num_features; ++f) {
for (uint32_t f = 0; f < num_features; ++f) {
FEATURE feature = NewFeature(&IntFeatDesc);

feature->Params[IntX] = features[f].X;
Expand Down
4 changes: 2 additions & 2 deletions src/classify/shapeclassifier.cpp
Expand Up @@ -109,8 +109,8 @@ void ShapeClassifier::DebugDisplay(const TrainingSample& sample,
popup_menu->BuildMenu(debug_win, false);
// Display the features in green.
const INT_FEATURE_STRUCT* features = sample.features();
int num_features = sample.num_features();
for (int f = 0; f < num_features; ++f) {
uint32_t num_features = sample.num_features();
for (uint32_t f = 0; f < num_features; ++f) {
RenderIntFeature(debug_win, &features[f], ScrollView::GREEN);
}
debug_win->Update();
Expand Down
28 changes: 14 additions & 14 deletions src/classify/trainingsample.cpp
Expand Up @@ -61,12 +61,10 @@ bool TrainingSample::Serialize(FILE* fp) const {
return false;
if (fwrite(&outline_length_, sizeof(outline_length_), 1, fp) != 1)
return false;
if (static_cast<int>(fwrite(features_, sizeof(*features_), num_features_, fp))
!= num_features_)
if (fwrite(features_, sizeof(*features_), num_features_, fp) != num_features_)
return false;
if (static_cast<int>(fwrite(micro_features_, sizeof(*micro_features_),
num_micro_features_,
fp)) != num_micro_features_)
if (fwrite(micro_features_, sizeof(*micro_features_), num_micro_features_,
fp) != num_micro_features_)
return false;
if (fwrite(cn_feature_, sizeof(*cn_feature_), kNumCNParams, fp) !=
kNumCNParams) return false;
Expand Down Expand Up @@ -102,16 +100,18 @@ bool TrainingSample::DeSerialize(bool swap, FILE* fp) {
ReverseN(&num_micro_features_, sizeof(num_micro_features_));
ReverseN(&outline_length_, sizeof(outline_length_));
}
// Arbitrarily limit the number of elements to protect against bad data.
if (num_features_ > UINT16_MAX) return false;
if (num_micro_features_ > UINT16_MAX) return false;
delete [] features_;
features_ = new INT_FEATURE_STRUCT[num_features_];
if (static_cast<int>(fread(features_, sizeof(*features_), num_features_, fp))
if (fread(features_, sizeof(*features_), num_features_, fp)
!= num_features_)
return false;
delete [] micro_features_;
micro_features_ = new MicroFeature[num_micro_features_];
if (static_cast<int>(fread(micro_features_, sizeof(*micro_features_),
num_micro_features_,
fp)) != num_micro_features_)
if (fread(micro_features_, sizeof(*micro_features_), num_micro_features_,
fp) != num_micro_features_)
return false;
if (fread(cn_feature_, sizeof(*cn_feature_), kNumCNParams, fp) !=
kNumCNParams) return false;
Expand Down Expand Up @@ -165,7 +165,7 @@ TrainingSample* TrainingSample::RandomizedCopy(int index) const {
++index; // Remove the first combination.
const int yshift = kYShiftValues[index / kSampleScaleSize];
double scaling = kScaleValues[index % kSampleScaleSize];
for (int i = 0; i < num_features_; ++i) {
for (uint32_t i = 0; i < num_features_; ++i) {
double result = (features_[i].X - kRandomizingCenter) * scaling;
result += kRandomizingCenter;
sample->features_[i].X = ClipToRange<int>(result + 0.5, 0, UINT8_MAX);
Expand Down Expand Up @@ -217,7 +217,7 @@ void TrainingSample::ExtractCharDesc(int int_feature_type,
} else {
num_features_ = char_features->NumFeatures;
features_ = new INT_FEATURE_STRUCT[num_features_];
for (int f = 0; f < num_features_; ++f) {
for (uint32_t f = 0; f < num_features_; ++f) {
features_[f].X =
static_cast<uint8_t>(char_features->Features[f]->Params[IntX]);
features_[f].Y =
Expand All @@ -238,7 +238,7 @@ void TrainingSample::ExtractCharDesc(int int_feature_type,
} else {
num_micro_features_ = char_features->NumFeatures;
micro_features_ = new MicroFeature[num_micro_features_];
for (int f = 0; f < num_micro_features_; ++f) {
for (uint32_t f = 0; f < num_micro_features_; ++f) {
for (int d = 0; d < MFCount; ++d) {
micro_features_[f][d] = char_features->Features[f]->Params[d];
}
Expand Down Expand Up @@ -294,7 +294,7 @@ void TrainingSample::MapFeatures(const IntFeatureMap& feature_map) {
// Returns a pix representing the sample. (Int features only.)
Pix* TrainingSample::RenderToPix(const UNICHARSET* unicharset) const {
Pix* pix = pixCreate(kIntFeatureExtent, kIntFeatureExtent, 1);
for (int f = 0; f < num_features_; ++f) {
for (uint32_t f = 0; f < num_features_; ++f) {
int start_x = features_[f].X;
int start_y = kIntFeatureExtent - features_[f].Y;
double dx = cos((features_[f].Theta / 256.0) * 2.0 * M_PI - M_PI);
Expand All @@ -315,7 +315,7 @@ Pix* TrainingSample::RenderToPix(const UNICHARSET* unicharset) const {
void TrainingSample::DisplayFeatures(ScrollView::Color color,
ScrollView* window) const {
#ifndef GRAPHICS_DISABLED
for (int f = 0; f < num_features_; ++f) {
for (uint32_t f = 0; f < num_features_; ++f) {
RenderIntFeature(window, &features_[f], color);
}
#endif // GRAPHICS_DISABLED
Expand Down
8 changes: 4 additions & 4 deletions src/classify/trainingsample.h
Expand Up @@ -137,13 +137,13 @@ class TrainingSample : public ELIST_LINK {
void set_bounding_box(const TBOX& box) {
bounding_box_ = box;
}
int num_features() const {
uint32_t num_features() const {
return num_features_;
}
const INT_FEATURE_STRUCT* features() const {
return features_;
}
int num_micro_features() const {
uint32_t num_micro_features() const {
return num_micro_features_;
}
const MicroFeature* micro_features() const {
Expand Down Expand Up @@ -206,9 +206,9 @@ class TrainingSample : public ELIST_LINK {
// Bounding box of sample in original image.
TBOX bounding_box_;
// Number of INT_FEATURE_STRUCT in features_ array.
int num_features_;
uint32_t num_features_;
// Number of MicroFeature in micro_features_ array.
int num_micro_features_;
uint32_t num_micro_features_;
// Total length of outline in the baseline normalized coordinate space.
// See comment in WERD_RES class definition for a discussion of coordinate
// spaces.
Expand Down

0 comments on commit 92e2ad0

Please sign in to comment.