From 68cbcbe1ebbe4e109445d8cb0bd5b84bb3daf11f Mon Sep 17 00:00:00 2001
From: Jordan Cheney
Date: Fri, 1 May 2015 19:03:20 -0400
Subject: [PATCH] Front end copied, cleaned up (a little) and working

---
 openbr/core/cascade.cpp             | 390 ++++++++++++++++++++++++++++++++++
 openbr/core/cascade.h               | 406 +++++++++++++++++++++++++++++++++++
 openbr/plugins/metadata/cascade.cpp |  12 ++++++------
 3 files changed, 802 insertions(+), 6 deletions(-)
 create mode 100644 openbr/core/cascade.cpp
 create mode 100644 openbr/core/cascade.h

diff --git a/openbr/core/cascade.cpp b/openbr/core/cascade.cpp
new file mode 100644
index 0000000..fecd53c
--- /dev/null
+++ b/openbr/core/cascade.cpp
@@ -0,0 +1,390 @@
+#include "cascade.h"
+
+using namespace br;
+
+void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights)
+{
+    if( groupThreshold <= 0 || rectList.empty() )
+    {
+        if( weights )
+        {
+            size_t i, sz = rectList.size();
+            weights->resize(sz);
+            for( i = 0; i < sz; i++ )
+                (*weights)[i] = 1;
+        }
+        return;
+    }
+
+    vector<int> labels;
+    int nclasses = partition(rectList, labels, SimilarRects(eps));
+
+    vector<Rect> rrects(nclasses);
+    vector<int> rweights(nclasses, 0);
+    vector<int> rejectLevels(nclasses, 0);
+    vector<double> rejectWeights(nclasses, DBL_MIN);
+    int i, j, nlabels = (int)labels.size();
+    for( i = 0; i < nlabels; i++ )
+    {
+        int cls = labels[i];
+        rrects[cls].x += rectList[i].x;
+        rrects[cls].y += rectList[i].y;
+        rrects[cls].width += rectList[i].width;
+        rrects[cls].height += rectList[i].height;
+        rweights[cls]++;
+    }
+    if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
+    {
+        for( i = 0; i < nlabels; i++ )
+        {
+            int cls = labels[i];
+            if( (*weights)[i] > rejectLevels[cls] )
+            {
+                rejectLevels[cls] = (*weights)[i];
+                rejectWeights[cls] = (*levelWeights)[i];
+            }
+            else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
+                rejectWeights[cls] = (*levelWeights)[i];
+        }
+    }
+
+    for( i = 0; i < nclasses; i++ )
+    {
+        Rect r = rrects[i];
+        float s = 1.f/rweights[i];
+        rrects[i] = Rect(saturate_cast<int>(r.x*s),
+                         saturate_cast<int>(r.y*s),
+                         saturate_cast<int>(r.width*s),
+                         saturate_cast<int>(r.height*s));
+    }
+
+    rectList.clear();
+    if( weights )
+        weights->clear();
+    if( levelWeights )
+        levelWeights->clear();
+
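+    // Final pass: emit one averaged rectangle per surviving class. A class is
+    // dropped if it has too few supporters (n1 <= groupThreshold), or if it is
+    // nested inside a larger class with stronger support.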
+    for( i = 0; i < nclasses; i++ )
+    {
+        Rect r1 = rrects[i];
+        int n1 = levelWeights ? rejectLevels[i] : rweights[i];
+        double w1 = rejectWeights[i];
+        if( n1 <= groupThreshold )
+            continue;
+        // filter out small face rectangles inside large rectangles
+        for( j = 0; j < nclasses; j++ )
+        {
+            int n2 = rweights[j];
+
+            if( j == i || n2 <= groupThreshold )
+                continue;
+            Rect r2 = rrects[j];
+
+            int dx = saturate_cast<int>( r2.width * eps );
+            int dy = saturate_cast<int>( r2.height * eps );
+
+            if( i != j &&
+                r1.x >= r2.x - dx &&
+                r1.y >= r2.y - dy &&
+                r1.x + r1.width <= r2.x + r2.width + dx &&
+                r1.y + r1.height <= r2.y + r2.height + dy &&
+                (n2 > std::max(3, n1) || n1 < 3) )
+                break;
+        }
+
+        if( j == nclasses )
+        {
+            rectList.push_back(r1);
+            if( weights )
+                weights->push_back(n1);
+            if( levelWeights )
+                levelWeights->push_back(w1);
+        }
+    }
+}
+
+void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps)
+{
+    groupRectangles(rectList, groupThreshold, eps, 0, 0);
+}
+
+void br::groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps)
+{
+    groupRectangles(rectList, groupThreshold, eps, &weights, 0);
+}
+
+// used by the cascade detection algorithm for ROC-curve calculation
+void br::groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps)
+{
+    groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
+}
+
+bool _FeatureEvaluator::Feature::read(const FileNode& node )
+{
+    FileNode rnode = node[CC_RECT];
+    FileNodeIterator it = rnode.begin();
+    it >> rect.x >> rect.y >> rect.width >> rect.height;
+    return true;
+}
+
+bool _FeatureEvaluator::read( const FileNode& node )
+{
+    features->resize(node.size());
+    featuresPtr = &(*features)[0];
+    FileNodeIterator it = node.begin(), it_end = node.end();
+    for(int i = 0; it != it_end; ++it, i++)
+    {
+        if(!featuresPtr[i].read(*it))
+            return false;
+    }
+    return true;
+}
+
+bool _FeatureEvaluator::setImage( const Mat& image, Size _origWinSize )
+{
+    int rn = image.rows+1, cn = image.cols+1;
+    origWinSize = _origWinSize;
+
+    if( image.cols < origWinSize.width || image.rows < origWinSize.height )
+        return false;
+
+    if( sum0.rows < rn || sum0.cols < cn )
+        sum0.create(rn, cn, CV_32S);
+    sum = Mat(rn, cn, CV_32S, sum0.data);
+    integral(image, sum);
+
+    size_t fi, nfeatures = features->size();
+
+    for( fi = 0; fi < nfeatures; fi++ )
+        featuresPtr[fi].updatePtrs( sum );
+    return true;
+}
+
+bool _FeatureEvaluator::setWindow( Point pt )
+{
+    if( pt.x < 0 || pt.y < 0 ||
+        pt.x + origWinSize.width >= sum.cols ||
+        pt.y + origWinSize.height >= sum.rows )
+        return false;
+    offset = pt.y * ((int)sum.step/sizeof(int)) + pt.x;
+    return true;
+}
+
+// --------------------------------- Cascade Classifier ----------------------------------
+
+bool _CascadeClassifier::load(const string& filename)
+{
+    data = Data();
+    featureEvaluator.release();
+
+    FileStorage fs(filename, FileStorage::READ);
+    if( !fs.isOpened() )
+        return false;
+
+    if( read(fs.getFirstTopLevelNode()) )
+        return true;
+
+    return false;
+}
+
+bool _CascadeClassifier::read(const FileNode& root)
+{
+    if( !data.read(root) )
+        return false;
+
+    // load features
+    featureEvaluator = Ptr<_FeatureEvaluator>(new _FeatureEvaluator());
+    FileNode fn = root[CC_FEATURES];
+    if( fn.empty() )
+        return false;
+
+    return featureEvaluator->read(fn);
+}
+
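+// Evaluates the full cascade on the window anchored at pt. Returns 1 if every
+// stage accepts, -1 if the window falls outside the integral image, and -si if
+// stage si rejects it; weight receives the final stage sum in either case.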
+int _CascadeClassifier::runAt(Point pt, double& weight)
+{
+    if( !featureEvaluator->setWindow(pt) )
+        return -1;
+
+    if( data.isStumpBased )
+        return predictCategoricalStump<_FeatureEvaluator>( *this, featureEvaluator, weight );
+    return predictCategorical<_FeatureEvaluator>( *this, featureEvaluator, weight );
+}
+
+void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
+                                           vector<int>& rejectLevels,
+                                           vector<double>& levelWeights,
+                                           double scaleFactor, int minNeighbors,
+                                           int flags, Size minObjectSize, Size maxObjectSize,
+                                           bool outputRejectLevels )
+{
+    const double GROUP_EPS = 0.2;
+
+    CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
+
+    if (data.stages.empty())
+        return;
+
+    if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
+        maxObjectSize = image.size();
+
+    Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
+
+    for (double factor = 1; ; factor *= scaleFactor) {
+        Size originalWindowSize = data.origWinSize;
+
+        Size windowSize(cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor));
+        Size scaledImageSize(cvRound(image.cols/factor), cvRound(image.rows/factor));
+        Size processingRectSize(scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height);
+
+        if (processingRectSize.width <= 0 || processingRectSize.height <= 0)
+            break;
+        if (windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height)
+            break;
+        if (windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height)
+            continue;
+
+        Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data);
+        resize(image, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR);
+        if (!featureEvaluator->setImage(scaledImage, originalWindowSize))
+            qFatal("Couldn't set the image");
+
+        int yStep = factor > 2. ? 1 : 2;
+        for (int y = 0; y < processingRectSize.height; y += yStep) {
+            for (int x = 0; x < processingRectSize.width; x += yStep) {
+                double gypWeight;
+                int result = runAt(Point(x, y), gypWeight);
+
+                if (outputRejectLevels) {
+                    if (result == 1)
+                        result = -(int)data.stages.size();
+                    if (data.stages.size() + result < 4) {
+                        objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));
+                        rejectLevels.push_back(-result);
+                        levelWeights.push_back(gypWeight);
+                    }
+                }
+                else if (result > 0) {
+                    objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));
+                }
+                if (result == 0)
+                    x += yStep;
+            }
+        }
+    }
+
+    if (outputRejectLevels)
+        groupRectangles(objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS);
+    else
+        groupRectangles(objects, minNeighbors, GROUP_EPS);
+}
+
+void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
+                                           double scaleFactor, int minNeighbors,
+                                           int flags, Size minObjectSize, Size maxObjectSize)
+{
+    vector<int> fakeLevels;
+    vector<double> fakeWeights;
+    detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,
+                      minNeighbors, flags, minObjectSize, maxObjectSize, false );
+}
+
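+// Data::read parses the OpenCV cascade XML layout: stage parameters, feature
+// parameters (maxCatCount > 0 selects categorical subset nodes over ordered
+// threshold nodes), then each stage's weak trees, whose internalNodes stream
+// is [left, right, featureIdx, subset words or threshold] per node.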
+bool _CascadeClassifier::Data::read(const FileNode &root)
+{
+    static const float THRESHOLD_EPS = 1e-5f;
+
+    // load stage params
+    string stageTypeStr = (string)root[CC_STAGE_TYPE];
+    if( stageTypeStr == CC_BOOST )
+        stageType = BOOST;
+    else
+        return false;
+
+    featureType = _FeatureEvaluator::LBP;
+
+    origWinSize.width = (int)root[CC_WIDTH];
+    origWinSize.height = (int)root[CC_HEIGHT];
+    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );
+
+    isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1 ? true : false;
+
+    // load feature params
+    FileNode fn = root[CC_FEATURE_PARAMS];
+    if( fn.empty() )
+        return false;
+
+    ncategories = fn[CC_MAX_CAT_COUNT];
+    int subsetSize = (ncategories + 31)/32,
+        nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );
+
+    // load stages
+    fn = root[CC_STAGES];
+    if( fn.empty() )
+        return false;
+
+    stages.reserve(fn.size());
+    classifiers.clear();
+    nodes.clear();
+
+    FileNodeIterator it = fn.begin(), it_end = fn.end();
+
+    for( int si = 0; it != it_end; si++, ++it )
+    {
+        FileNode fns = *it;
+        Stage stage;
+        stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
+        fns = fns[CC_WEAK_CLASSIFIERS];
+        if(fns.empty())
+            return false;
+        stage.ntrees = (int)fns.size();
+        stage.first = (int)classifiers.size();
+        stages.push_back(stage);
+        classifiers.reserve(stages[si].first + stages[si].ntrees);
+
+        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
+        for( ; it1 != it1_end; ++it1 ) // weak trees
+        {
+            FileNode fnw = *it1;
+            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
+            FileNode leafValues = fnw[CC_LEAF_VALUES];
+            if( internalNodes.empty() || leafValues.empty() )
+                return false;
+
+            DTree tree;
+            tree.nodeCount = (int)internalNodes.size()/nodeStep;
+            classifiers.push_back(tree);
+
+            nodes.reserve(nodes.size() + tree.nodeCount);
+            leaves.reserve(leaves.size() + leafValues.size());
+            if( subsetSize > 0 )
+                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);
+
+            FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();
+
+            for( ; internalNodesIter != internalNodesEnd; ) // nodes
+            {
+                DTreeNode node;
+                node.left = (int)*internalNodesIter; ++internalNodesIter;
+                node.right = (int)*internalNodesIter; ++internalNodesIter;
+                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
+                if( subsetSize > 0 )
+                {
+                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
+                        subsets.push_back((int)*internalNodesIter);
+                    node.threshold = 0.f;
+                }
+                else
+                {
+                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
+                }
+                nodes.push_back(node);
+            }
+
+            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
+
+            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
+                leaves.push_back((float)*internalNodesIter);
+        }
+    }
+    return true;
+}
diff --git a/openbr/core/cascade.h b/openbr/core/cascade.h
new file mode 100644
index 0000000..b63adab
--- /dev/null
+++ b/openbr/core/cascade.h
@@ -0,0 +1,406 @@
+#ifndef CASCADE_H
+#define CASCADE_H
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+
+#define CC_CASCADE_PARAMS "cascadeParams"
+#define CC_STAGE_TYPE    "stageType"
+#define CC_FEATURE_TYPE  "featureType"
+#define CC_HEIGHT        "height"
+#define CC_WIDTH         "width"
+
+#define CC_STAGE_NUM     "stageNum"
+#define CC_STAGES        "stages"
+#define CC_STAGE_PARAMS  "stageParams"
+
+#define CC_BOOST             "BOOST"
+#define CC_MAX_DEPTH         "maxDepth"
+#define CC_WEAK_COUNT        "maxWeakCount"
+#define CC_STAGE_THRESHOLD   "stageThreshold"
+#define CC_WEAK_CLASSIFIERS  "weakClassifiers"
+#define CC_INTERNAL_NODES    "internalNodes"
+#define CC_LEAF_VALUES       "leafValues"
+
+#define CC_FEATURES       "features"
+#define CC_FEATURE_PARAMS "featureParams"
+#define CC_MAX_CAT_COUNT  "maxCatCount"
+
+#define CC_HAAR   "HAAR"
+#define CC_RECTS  "rects"
+#define CC_TILTED "tilted"
+
+#define CC_LBP  "LBP"
+#define CC_RECT "rect"
+
+#define CC_HOG      "HOG"
+#define CC_HOGMulti "HOGMulti"
+
+#define CC_NPD    "NPD"
+#define CC_POINTS "points"
+
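+/* CV_SUM_PTRS caches the four integral-image corner pointers of rect, so that
+   CALC_SUM can later evaluate the rectangle sum at any window offset with just
+   three additions. */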
+#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step )                    \
+    /* (x, y) */                                                          \
+    (p0) = sum + (rect).x + (step) * (rect).y,                            \
+    /* (x + w, y) */                                                      \
+    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y,             \
+    /* (x, y + h) */                                                      \
+    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height),          \
+    /* (x + w, y + h) */                                                  \
+    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)
+
+#define CALC_SUM_(p0, p1, p2, p3, offset) \
+    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])
+
+#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
+
+
+using namespace std;
+using namespace cv;
+
+namespace br
+{
+
+// class for grouping object candidates, detected by Cascade Classifier, HOG etc.
+// instance of the class is to be passed to cv::partition (see cxoperations.hpp)
+class SimilarRects
+{
+public:
+    SimilarRects(double _eps) : eps(_eps) {}
+    inline bool operator()(const Rect& r1, const Rect& r2) const
+    {
+        double delta = eps*(std::min(r1.width, r2.width) + std::min(r1.height, r2.height))*0.5;
+        return std::abs(r1.x - r2.x) <= delta &&
+               std::abs(r1.y - r2.y) <= delta &&
+               std::abs(r1.x + r1.width - r2.x - r2.width) <= delta &&
+               std::abs(r1.y + r1.height - r2.y - r2.height) <= delta;
+    }
+    double eps;
+};
+
+void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps=0.2);
+void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps=0.2);
+void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights );
+void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps=0.2);
+
+class _FeatureEvaluator
+{
+public:
+    enum { LBP = 0 };
+
+    _FeatureEvaluator() : features(new vector<Feature>()) {}
+    virtual ~_FeatureEvaluator() {}
+
+    virtual bool read( const FileNode& node );
+
+    virtual bool setImage(const Mat& image, Size _origWinSize);
+    virtual bool setWindow(Point pt);
+
+    int operator()(int featureIdx) const { return featuresPtr[featureIdx].calc(offset); }
+    virtual int calcCat(int featureIdx) const { return (*this)(featureIdx); }
+
+protected:
+    struct Feature
+    {
+        Feature();
+        Feature( int x, int y, int _block_w, int _block_h ) :
+            rect(x, y, _block_w, _block_h) {}
+
+        int calc( int offset ) const;
+        void updatePtrs( const Mat& sum );
+        bool read(const FileNode& node );
+
+        Rect rect; // width and height for block
+        const int* p[16]; // fast
+    };
+
+    Size origWinSize;
+    Ptr<vector<Feature> > features;
+    Feature* featuresPtr; // optimization
+    Mat sum0, sum, window;
+    Rect normrect;
+
+    int offset;
+};
+
+inline _FeatureEvaluator::Feature::Feature()
+{
+    rect = Rect();
+    for( int i = 0; i < 16; i++ )
+        p[i] = 0;
+}
+
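+// calc computes an 8-bit local binary pattern over a 3x3 grid of equally sized
+// blocks (p[0..15] are the 4x4 lattice of block corners): the sum of each of
+// the 8 outer blocks is compared against the center block (p[5], p[6], p[9],
+// p[10]) and the results are packed into one byte.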
+inline int _FeatureEvaluator::Feature::calc( int _offset ) const
+{
+    int cval = CALC_SUM_( p[5], p[6], p[9], p[10], _offset );
+
+    return (CALC_SUM_( p[0], p[1], p[4], p[5], _offset ) >= cval ? 128 : 0) |   // 0
+           (CALC_SUM_( p[1], p[2], p[5], p[6], _offset ) >= cval ? 64 : 0) |    // 1
+           (CALC_SUM_( p[2], p[3], p[6], p[7], _offset ) >= cval ? 32 : 0) |    // 2
+           (CALC_SUM_( p[6], p[7], p[10], p[11], _offset ) >= cval ? 16 : 0) |  // 5
+           (CALC_SUM_( p[10], p[11], p[14], p[15], _offset ) >= cval ? 8 : 0) | // 8
+           (CALC_SUM_( p[9], p[10], p[13], p[14], _offset ) >= cval ? 4 : 0) |  // 7
+           (CALC_SUM_( p[8], p[9], p[12], p[13], _offset ) >= cval ? 2 : 0) |   // 6
+           (CALC_SUM_( p[4], p[5], p[8], p[9], _offset ) >= cval ? 1 : 0);      // 3
+}
+
+inline void _FeatureEvaluator::Feature::updatePtrs( const Mat& _sum )
+{
+    const int* ptr = (const int*)_sum.data;
+    size_t step = _sum.step/sizeof(ptr[0]);
+    Rect tr = rect;
+    CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step );
+    tr.x += 2*rect.width;
+    CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step );
+    tr.y += 2*rect.height;
+    CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step );
+    tr.x -= 2*rect.width;
+    CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
+}
+
+enum
+{
+    CASCADE_DO_CANNY_PRUNING=1,
+    CASCADE_SCALE_IMAGE=2,
+    CASCADE_FIND_BIGGEST_OBJECT=4,
+    CASCADE_DO_ROUGH_SEARCH=8
+};
+
+class _CascadeClassifier
+{
+public:
+    _CascadeClassifier() {}
+    _CascadeClassifier( const string& filename ) { load(filename); }
+    ~_CascadeClassifier() {}
+
+    bool load( const string& filename );
+    bool read( const FileNode& node );
+    void detectMultiScale( const Mat& image,
+                           vector<Rect>& objects,
+                           double scaleFactor=1.1,
+                           int minNeighbors=3, int flags=0,
+                           Size minSize=Size(),
+                           Size maxSize=Size() );
+
+    void detectMultiScale( const Mat& image,
+                           vector<Rect>& objects,
+                           vector<int>& rejectLevels,
+                           vector<double>& levelWeights,
+                           double scaleFactor=1.1,
+                           int minNeighbors=3, int flags=0,
+                           Size minSize=Size(),
+                           Size maxSize=Size(),
+                           bool outputRejectLevels=false );
+
+    enum { BOOST = 0 };
+    enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2,
+           FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
+
+    template<class FEval>
+    friend int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
+
+    template<class FEval>
+    friend int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
+
+    template<class FEval>
+    friend int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
+
+    template<class FEval>
+    friend int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
+
+    virtual int runAt(Point pt, double& weight );
+
+    class Data
+    {
+    public:
+        struct DTreeNode
+        {
+            int featureIdx;
+            float threshold; // for ordered features only
+            int left;
+            int right;
+        };
+
+        struct DTree
+        {
+            int nodeCount;
+        };
+
+        struct Stage
+        {
+            int first;
+            int ntrees;
+            float threshold;
+        };
+
+        bool read(const FileNode &node);
+
+        bool isStumpBased;
+
+        int stageType;
+        int featureType;
+        int ncategories;
+        Size origWinSize;
+
+        vector<Stage> stages;
+        vector<DTree> classifiers;
+        vector<DTreeNode> nodes;
+        vector<float> leaves;
+        vector<int> subsets;
+    };
+
+    Data data;
+    Ptr<_FeatureEvaluator> featureEvaluator;
+};
+
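+// The four predict* templates share one contract: accumulate the leaf values
+// of every weak tree in a stage into sum, and return -si as soon as stage si
+// falls below its threshold, or 1 if all stages pass. A tree walk terminates
+// with idx <= 0, so leafOfs - idx indexes the leaf array. The *Stump variants
+// assume one node per tree and skip the walk; 'Categorical' means bit-set
+// subset lookups instead of threshold comparisons.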
+template<class FEval>
+inline int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    for( int si = 0; si < nstages; si++ )
+    {
+        _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+        sum = 0;
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
+            int idx = 0, root = nodeOfs;
+
+            do
+            {
+                _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                double val = featureEvaluator(node.featureIdx);
+                idx = val < node.threshold ? node.left : node.right;
+            }
+            while( idx > 0 );
+            sum += cascadeLeaves[leafOfs - idx];
+            nodeOfs += weak.nodeCount;
+            leafOfs += weak.nodeCount + 1;
+        }
+        if( sum < stage.threshold )
+            return -si;
+    }
+    return 1;
+}
+
+template<class FEval>
+inline int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    for(int si = 0; si < nstages; si++ )
+    {
+        _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+        sum = 0;
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
+            int idx = 0, root = nodeOfs;
+            do
+            {
+                _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                int c = featureEvaluator(node.featureIdx);
+                const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
+                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
+            }
+            while( idx > 0 );
+            sum += cascadeLeaves[leafOfs - idx];
+            nodeOfs += weak.nodeCount;
+            leafOfs += weak.nodeCount + 1;
+        }
+        if( sum < stage.threshold )
+            return -si;
+    }
+    return 1;
+}
+
+template<class FEval>
+inline int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    int nstages = (int)cascade.data.stages.size();
+    for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
+    {
+        _CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
+        sum = 0.0;
+
+        int ntrees = stage.ntrees;
+        for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
+        {
+            _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            double value = featureEvaluator(node.featureIdx);
+            sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
+        }
+
+        if( sum < stage.threshold )
+            return -stageIdx;
+    }
+
+    return 1;
+}
+
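+// Hot path for stump-based LBP cascades (the variant runAt selects when
+// isStumpBased is set): with ncategories = 256, each node carries a 256-bit
+// subset (8 ints) and classification is a single bit test per weak classifier.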
+template<class FEval>
+inline int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    for( int si = 0; si < nstages; si++ )
+    {
+        _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+
+        sum = 0;
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            int c = featureEvaluator(node.featureIdx);
+            const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
+
+            sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1 ];
+
+            nodeOfs++;
+            leafOfs += 2;
+        }
+
+        if( sum < stage.threshold )
+            return -si;
+    }
+
+    return 1;
+}
+
+} // namespace br
+
+#endif // CASCADE_H
diff --git a/openbr/plugins/metadata/cascade.cpp b/openbr/plugins/metadata/cascade.cpp
index 9dd61e9..d99c8c5 100644
--- a/openbr/plugins/metadata/cascade.cpp
+++ b/openbr/plugins/metadata/cascade.cpp
@@ -15,10 +15,10 @@
  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
 
 #include <opencv2/core/core.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
-#include <opencv2/objdetect/objdetect.hpp>
 #include <openbr/plugins/openbr_internal.h>
 #include <openbr/core/opencvutils.h>
+#include <openbr/core/cascade.h>
 #include <openbr/core/common.h>
 #include <openbr/core/qtutils.h>
 #include <openbr/core/resource.h>
 
@@ -28,7 +28,7 @@ using namespace cv;
 
 namespace br
 {
 
-class CascadeResourceMaker : public ResourceMaker<CascadeClassifier>
+class CascadeResourceMaker : public ResourceMaker<_CascadeClassifier>
 {
     QString file;
 
@@ -49,9 +49,9 @@ public:
     }
 
 private:
-    CascadeClassifier *make() const
+    _CascadeClassifier *make() const
     {
-        CascadeClassifier *cascade = new CascadeClassifier();
+        _CascadeClassifier *cascade = new _CascadeClassifier();
         if (!cascade->load(file.toStdString()))
             qFatal("Failed to load: %s", qPrintable(file));
         return cascade;
@@ -77,7 +77,7 @@ class CascadeTransform : public UntrainableMetaTransform
     BR_PROPERTY(int, minNeighbors, 5)
     BR_PROPERTY(bool, ROCMode, false)
 
-    Resource<CascadeClassifier> cascadeResource;
+    Resource<_CascadeClassifier> cascadeResource;
 
     void init()
     {
@@ -95,7 +95,7 @@
 
     void project(const TemplateList &src, TemplateList &dst) const
     {
-        CascadeClassifier *cascade = cascadeResource.acquire();
+        _CascadeClassifier *cascade = cascadeResource.acquire();
 
         foreach (const Template &t, src) {
             const bool enrollAll = t.file.getBool("enrollAll");
-- 
libgit2 0.21.4