
Added HOG features to the cascade detection algorithm.

Added a trained pedestrian detection model for the HOG cascade detection algorithm.
commit 50543d627fd88dc96d28544b680d13010b11ef76 (1 parent: 4ee462c), authored by @CheeseWiz
data/hogcascades/hogcascade_pedestrians.xml (3,986 additions, 0 deletions; contents not shown)
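
The new model can be used like any other cascade through cv::CascadeClassifier. A minimal usage sketch, assuming an OpenCV build that includes this commit; the image file name and the detectMultiScale parameters below are illustrative and not taken from the commit:

#include <cstdio>
#include <vector>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>

int main()
{
    cv::CascadeClassifier cascade;
    // Path of the model added by this commit, relative to the OpenCV source root.
    if( !cascade.load( "data/hogcascades/hogcascade_pedestrians.xml" ) )
        return -1;

    cv::Mat img = cv::imread( "street.jpg", 0 );   // hypothetical grayscale test image
    if( img.empty() )
        return -1;

    // Scale factor and minNeighbors are generic defaults, not values from the commit.
    std::vector<cv::Rect> pedestrians;
    cascade.detectMultiScale( img, pedestrians, 1.1, 3 );

    for( size_t i = 0; i < pedestrians.size(); i++ )
        printf( "pedestrian at (%d, %d) %d x %d\n", pedestrians[i].x, pedestrians[i].y,
                pedestrians[i].width, pedestrians[i].height );
    return 0;
}
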
modules/objdetect/include/opencv2/objdetect/objdetect.hpp (2 changed lines)
@@ -298,7 +298,7 @@ CV_EXPORTS void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>
class CV_EXPORTS FeatureEvaluator
{
public:
- enum { HAAR = 0, LBP = 1 };
+ enum { HAAR = 0, LBP = 1, HOG = 2 };
virtual ~FeatureEvaluator();
virtual bool read(const FileNode& node);
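
The new enum value is wired into the FeatureEvaluator::create factory later in this commit (see cascadedetect.cpp below), so a HOG evaluator is obtained the same way as the Haar and LBP ones; a one-line sketch:

    cv::Ptr<cv::FeatureEvaluator> eval = cv::FeatureEvaluator::create( cv::FeatureEvaluator::HOG );
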
modules/objdetect/src/cascadedetect.cpp (240 changed lines)
@@ -592,14 +592,197 @@ bool LBPEvaluator::setWindow( Point pt )
return false;
offset = pt.y * ((int)sum.step/sizeof(int)) + pt.x;
return true;
+}
+
+//---------------------------------------------- HOGEvaluator ---------------------------------------
+bool HOGEvaluator::Feature :: read( const FileNode& node )
+{
+ FileNode rnode = node[CC_RECT];
+ FileNodeIterator it = rnode.begin();
+ it >> rect[0].x >> rect[0].y >> rect[0].width >> rect[0].height >> featComponent;
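+ // rect[0], read from the cascade file, is the top-left cell of the feature;
+ // the other three cells of its 2x2 block are derived from it below.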
+ rect[1].x = rect[0].x + rect[0].width;
+ rect[1].y = rect[0].y;
+ rect[2].x = rect[0].x;
+ rect[2].y = rect[0].y + rect[0].height;
+ rect[3].x = rect[0].x + rect[0].width;
+ rect[3].y = rect[0].y + rect[0].height;
+ rect[1].width = rect[2].width = rect[3].width = rect[0].width;
+ rect[1].height = rect[2].height = rect[3].height = rect[0].height;
+ return true;
+}
+
+HOGEvaluator::HOGEvaluator()
+{
+ features = new vector<Feature>();
+}
+
+HOGEvaluator::~HOGEvaluator()
+{
+}
+
+bool HOGEvaluator::read( const FileNode& node )
+{
+ features->resize(node.size());
+ featuresPtr = &(*features)[0];
+ FileNodeIterator it = node.begin(), it_end = node.end();
+ for(int i = 0; it != it_end; ++it, i++)
+ {
+ if(!featuresPtr[i].read(*it))
+ return false;
+ }
+ return true;
+}
+
+Ptr<FeatureEvaluator> HOGEvaluator::clone() const
+{
+ HOGEvaluator* ret = new HOGEvaluator;
+ ret->origWinSize = origWinSize;
+ ret->features = features;
+ ret->featuresPtr = &(*ret->features)[0];
+ ret->offset = offset;
+ ret->hist = hist;
+ ret->normSum = normSum;
+ return ret;
+}
+
+bool HOGEvaluator::setImage( const Mat& image, Size winSize )
+{
+ int rows = image.rows + 1;
+ int cols = image.cols + 1;
+ origWinSize = winSize;
+ if( image.cols < origWinSize.width || image.rows < origWinSize.height )
+ return false;
+ hist.clear();
+ for( int bin = 0; bin < Feature::BIN_NUM; bin++ )
+ {
+ hist.push_back( Mat(rows, cols, CV_32FC1) );
+ }
+ normSum.create( rows, cols, CV_32FC1 );
+
+ integralHistogram( image, hist, normSum, Feature::BIN_NUM );
+
+ size_t featIdx, featCount = features->size();
+
+ for( featIdx = 0; featIdx < featCount; featIdx++ )
+ {
+ featuresPtr[featIdx].updatePtrs( hist, normSum );
+ }
+ return true;
}
-Ptr<FeatureEvaluator> FeatureEvaluator::create(int featureType)
+bool HOGEvaluator::setWindow(Point pt)
+{
+ if( pt.x < 0 || pt.y < 0 ||
+ pt.x + origWinSize.width >= hist[0].cols-2 ||
+ pt.y + origWinSize.height >= hist[0].rows-2 )
+ return false;
+ offset = pt.y * ((int)hist[0].step/sizeof(float)) + pt.x;
+ return true;
+}
+
+void HOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, Mat &norm, int nbins) const
+{
+ CV_Assert( img.type() == CV_8U || img.type() == CV_8UC3 );
+ int x, y, binIdx;
+
+ Size gradSize(img.size());
+ Size histSize(histogram[0].size());
+ Mat grad(gradSize, CV_32F);
+ Mat qangle(gradSize, CV_8U);
+
+ AutoBuffer<int> mapbuf(gradSize.width + gradSize.height + 4);
+ int* xmap = (int*)mapbuf + 1;
+ int* ymap = xmap + gradSize.width + 2;
+
+ const int borderType = (int)BORDER_REPLICATE;
+
+ for( x = -1; x < gradSize.width + 1; x++ )
+ xmap[x] = borderInterpolate(x, gradSize.width, borderType);
+ for( y = -1; y < gradSize.height + 1; y++ )
+ ymap[y] = borderInterpolate(y, gradSize.height, borderType);
+
+ int width = gradSize.width;
+ AutoBuffer<float> _dbuf(width*4);
+ float* dbuf = _dbuf;
+ Mat Dx(1, width, CV_32F, dbuf);
+ Mat Dy(1, width, CV_32F, dbuf + width);
+ Mat Mag(1, width, CV_32F, dbuf + width*2);
+ Mat Angle(1, width, CV_32F, dbuf + width*3);
+
+ float angleScale = (float)(nbins/CV_PI);
+
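+ // Per row: central-difference gradients, then quantize the unsigned orientation
+ // into nbins bins over [0, pi) and keep the gradient magnitude.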
+ for( y = 0; y < gradSize.height; y++ )
+ {
+ const uchar* currPtr = img.data + img.step*ymap[y];
+ const uchar* prevPtr = img.data + img.step*ymap[y-1];
+ const uchar* nextPtr = img.data + img.step*ymap[y+1];
+ float* gradPtr = (float*)grad.ptr(y);
+ uchar* qanglePtr = (uchar*)qangle.ptr(y);
+
+ for( x = 0; x < width; x++ )
+ {
+ dbuf[x] = (float)(currPtr[xmap[x+1]] - currPtr[xmap[x-1]]);
+ dbuf[width + x] = (float)(nextPtr[xmap[x]] - prevPtr[xmap[x]]);
+ }
+ cartToPolar( Dx, Dy, Mag, Angle, false );
+ for( x = 0; x < width; x++ )
+ {
+ float mag = dbuf[x+width*2];
+ float angle = dbuf[x+width*3];
+ angle = angle*angleScale - 0.5f;
+ int bidx = cvFloor(angle);
+ angle -= bidx;
+ if( bidx < 0 )
+ bidx += nbins;
+ else if( bidx >= nbins )
+ bidx -= nbins;
+
+ qanglePtr[x] = (uchar)bidx;
+ gradPtr[x] = mag;
+ }
+ }
+ integral(grad, norm, grad.depth());
+
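+ // Build one integral image per orientation bin: histogram[binIdx](y, x) accumulates
+ // the gradient magnitude of all pixels above and to the left that fell into binIdx.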
+ float* histBuf;
+ const float* magBuf;
+ const uchar* binsBuf;
+
+ int binsStep = (int)( qangle.step / sizeof(uchar) );
+ int histStep = (int)( histogram[0].step / sizeof(float) );
+ int magStep = (int)( grad.step / sizeof(float) );
+ for( binIdx = 0; binIdx < nbins; binIdx++ )
+ {
+ histBuf = (float*)histogram[binIdx].data;
+ magBuf = (const float*)grad.data;
+ binsBuf = (const uchar*)qangle.data;
+
+ memset( histBuf, 0, histSize.width * sizeof(histBuf[0]) );
+ histBuf += histStep + 1;
+ for( y = 0; y < qangle.rows; y++ )
+ {
+ histBuf[-1] = 0.f;
+ float strSum = 0.f;
+ for( x = 0; x < qangle.cols; x++ )
+ {
+ if( binsBuf[x] == binIdx )
+ strSum += magBuf[x];
+ histBuf[x] = histBuf[-histStep + x] + strSum;
+ }
+ histBuf += histStep;
+ binsBuf += binsStep;
+ magBuf += magStep;
+ }
+ }
+}
+
+Ptr<FeatureEvaluator> FeatureEvaluator::create( int featureType )
{
return featureType == HAAR ? Ptr<FeatureEvaluator>(new HaarEvaluator) :
- featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) : Ptr<FeatureEvaluator>();
+ featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) :
+ featureType == HOG ? Ptr<FeatureEvaluator>(new HOGEvaluator) :
+ Ptr<FeatureEvaluator>();
}
-
+
//---------------------------------------- Classifier Cascade --------------------------------------------
CascadeClassifier::CascadeClassifier()
@@ -637,21 +820,38 @@ bool CascadeClassifier::load(const string& filename)
return !oldCascade.empty();
}
-
int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt, double& weight )
{
CV_Assert( oldCascade.empty() );
- assert(data.featureType == FeatureEvaluator::HAAR ||
- data.featureType == FeatureEvaluator::LBP);
+ assert( data.featureType == FeatureEvaluator::HAAR ||
+ data.featureType == FeatureEvaluator::LBP ||
+ data.featureType == FeatureEvaluator::HOG );
- return !featureEvaluator->setWindow(pt) ? -1 :
- data.isStumpBased ? ( data.featureType == FeatureEvaluator::HAAR ?
- predictOrderedStump<HaarEvaluator>( *this, featureEvaluator, weight ) :
- predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator, weight ) ) :
- ( data.featureType == FeatureEvaluator::HAAR ?
- predictOrdered<HaarEvaluator>( *this, featureEvaluator, weight ) :
- predictCategorical<LBPEvaluator>( *this, featureEvaluator, weight ) );
+ if( !featureEvaluator->setWindow(pt) )
+ return -1;
+ if( data.isStumpBased )
+ {
+ if( data.featureType == FeatureEvaluator::HAAR )
+ return predictOrderedStump<HaarEvaluator>( *this, featureEvaluator, weight );
+ else if( data.featureType == FeatureEvaluator::LBP )
+ return predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator, weight );
+ else if( data.featureType == FeatureEvaluator::HOG )
+ return predictOrderedStump<HOGEvaluator>( *this, featureEvaluator, weight );
+ else
+ return -2;
+ }
+ else
+ {
+ if( data.featureType == FeatureEvaluator::HAAR )
+ return predictOrdered<HaarEvaluator>( *this, featureEvaluator, weight );
+ else if( data.featureType == FeatureEvaluator::LBP )
+ return predictCategorical<LBPEvaluator>( *this, featureEvaluator, weight );
+ else if( data.featureType == FeatureEvaluator::HOG )
+ return predictOrdered<HOGEvaluator>( *this, featureEvaluator, weight );
+ else
+ return -2;
+ }
}
bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const Mat& image )
@@ -827,7 +1027,16 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );
- int yStep = factor > 2. ? 1 : 2;
+ int yStep;
+ if( getFeatureType() == cv::FeatureEvaluator::HOG )
+ {
+ yStep = 4;
+ }
+ else
+ {
+ yStep = factor > 2. ? 1 : 2;
+ }
+
int stripCount, stripSize;
#if defined(HAVE_TBB) || defined(HAVE_THREADING_FRAMEWORK)
@@ -885,6 +1094,9 @@ bool CascadeClassifier::Data::read(const FileNode &root)
featureType = FeatureEvaluator::HAAR;
else if( featureTypeStr == CC_LBP )
featureType = FeatureEvaluator::LBP;
+ else if( featureTypeStr == CC_HOG )
+ featureType = FeatureEvaluator::HOG;
+
else
return false;
modules/objdetect/src/cascadedetect.hpp (80 changed lines)
@@ -32,6 +32,8 @@ namespace cv
#define CC_LBP "LBP"
#define CC_RECT "rect"
+#define CC_HOG "HOG"
+
#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \
/* (x, y) */ \
(p0) = sum + (rect).x + (step) * (rect).y, \
@@ -236,6 +238,84 @@ inline void LBPEvaluator::Feature :: updatePtrs( const Mat& sum )
CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
}
+//---------------------------------------------- HOGEvaluator -------------------------------------------
+
+class HOGEvaluator : public FeatureEvaluator
+{
+public:
+ struct Feature
+ {
+ Feature();
+ float calc( int offset ) const;
+ void updatePtrs( const vector<Mat>& _hist, const Mat &_normSum );
+ bool read( const FileNode& node );
+
+ enum { CELL_NUM = 4, BIN_NUM = 9 };
+
+ Rect rect[CELL_NUM];
+ int featComponent; //component index from 0 to 35
+ const float* pF[4]; //for feature calculation
+ const float* pN[4]; //for normalization calculation
+ };
+ HOGEvaluator();
+ virtual ~HOGEvaluator();
+ virtual bool read( const FileNode& node );
+ virtual Ptr<FeatureEvaluator> clone() const;
+ virtual int getFeatureType() const { return FeatureEvaluator::HOG; }
+ virtual bool setImage( const Mat& image, Size winSize );
+ virtual bool setWindow( Point pt );
+ double operator()(int featureIdx) const
+ {
+ return featuresPtr[featureIdx].calc(offset);
+ }
+ virtual double calcOrd( int featureIdx ) const
+ {
+ return (*this)(featureIdx);
+ }
+
+private:
+ virtual void integralHistogram( const Mat& srcImage, vector<Mat> &histogram, Mat &norm, int nbins ) const;
+
+ Size origWinSize;
+ Ptr<vector<Feature> > features;
+ Feature* featuresPtr;
+ vector<Mat> hist;
+ Mat normSum;
+ int offset;
+};
+
+inline HOGEvaluator::Feature :: Feature()
+{
+ rect[0] = rect[1] = rect[2] = rect[3] = Rect();
+ pF[0] = pF[1] = pF[2] = pF[3] = 0;
+ pN[0] = pN[1] = pN[2] = pN[3] = 0;
+ featComponent = 0;
+}
+
+inline float HOGEvaluator::Feature :: calc( int offset ) const
+{
+ float res = CALC_SUM(pF, offset);
+ float normFactor = CALC_SUM(pN, offset);
+ res = (res > 0.001f) ? (res / ( normFactor + 0.001f) ) : 0.f;
+ return res;
+}
+
+inline void HOGEvaluator::Feature :: updatePtrs( const vector<Mat> &_hist, const Mat &_normSum )
+{
+ int binIdx = featComponent % BIN_NUM;
+ int cellIdx = featComponent / BIN_NUM;
+ Rect normRect = Rect( rect[0].x, rect[0].y, 2*rect[0].width, 2*rect[0].height );
+
+ const float* featBuf = (const float*)_hist[binIdx].data;
+ size_t featStep = _hist[0].step / sizeof(featBuf[0]);
+
+ const float* normBuf = (const float*)_normSum.data;
+ size_t normStep = _normSum.step / sizeof(normBuf[0]);
+
+ CV_SUM_PTRS( pF[0], pF[1], pF[2], pF[3], featBuf, rect[cellIdx], featStep );
+ CV_SUM_PTRS( pN[0], pN[1], pN[2], pN[3], normBuf, normRect, normStep );
+}
+
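
For reference, each HOG feature here addresses one of CELL_NUM * BIN_NUM = 36 components: featComponent selects an orientation bin (featComponent % 9) and one cell of the feature's 2x2 block (featComponent / 9), and calc() divides that cell's per-bin integral-histogram sum by the gradient-magnitude sum over the whole block (plus a small epsilon). A rough re-statement of the same computation with direct cv::Mat lookups instead of the precomputed pF/pN pointers; function and parameter names are illustrative, and rectangle coordinates are assumed to already include the window offset:

#include <vector>
#include <opencv2/core/core.hpp>

// Sum over rectangle r in an integral image ii ((rows+1) x (cols+1), CV_32FC1),
// using the standard 4-corner identity.
static float integralSum( const cv::Mat& ii, const cv::Rect& r )
{
    return ii.at<float>(r.y, r.x)
         + ii.at<float>(r.y + r.height, r.x + r.width)
         - ii.at<float>(r.y, r.x + r.width)
         - ii.at<float>(r.y + r.height, r.x);
}

// hist: the 9 per-bin integral histograms; normSum: integral of gradient magnitude;
// topLeft: rect[0] of the feature (its 2x2 block of equal cells starts there);
// featComponent: 0..35, as stored in the cascade file.
static float hogFeatureValue( const std::vector<cv::Mat>& hist, const cv::Mat& normSum,
                              const cv::Rect& topLeft, int featComponent )
{
    int bin     = featComponent % 9;           // orientation bin
    int cellIdx = featComponent / 9;           // which cell of the 2x2 block
    cv::Rect cell( topLeft.x + (cellIdx & 1) * topLeft.width,
                   topLeft.y + (cellIdx >> 1) * topLeft.height,
                   topLeft.width, topLeft.height );
    cv::Rect block( topLeft.x, topLeft.y, 2 * topLeft.width, 2 * topLeft.height );

    float res  = integralSum( hist[bin], cell );    // histogram mass in the cell
    float norm = integralSum( normSum, block );     // total gradient magnitude in the block
    return res > 0.001f ? res / ( norm + 0.001f ) : 0.f;
}
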

1 comment on commit 50543d6

@codingforfun

Hello, I would be curious to know what kind of data and how many samples you used to train the cascade included in this commit.
Thank you.
