Commit 68cbcbe1ebbe4e109445d8cb0bd5b84bb3daf11f

Authored by Jordan Cheney
1 parent b277fd20

Front end copied, cleaned up (a little) and working

openbr/core/cascade.cpp 0 → 100644
  1 +#include "cascade.h"
  2 +
  3 +using namespace br;
  4 +
  5 +void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights)
  6 +{
  7 + if( groupThreshold <= 0 || rectList.empty() )
  8 + {
  9 + if( weights )
  10 + {
  11 + size_t i, sz = rectList.size();
  12 + weights->resize(sz);
  13 + for( i = 0; i < sz; i++ )
  14 + (*weights)[i] = 1;
  15 + }
  16 + return;
  17 + }
  18 +
  19 + vector<int> labels;
  20 + int nclasses = partition(rectList, labels, SimilarRects(eps));
  21 +
  22 + vector<Rect> rrects(nclasses);
  23 + vector<int> rweights(nclasses, 0);
  24 + vector<int> rejectLevels(nclasses, 0);
  25 + vector<double> rejectWeights(nclasses, DBL_MIN);
  26 + int i, j, nlabels = (int)labels.size();
  27 + for( i = 0; i < nlabels; i++ )
  28 + {
  29 + int cls = labels[i];
  30 + rrects[cls].x += rectList[i].x;
  31 + rrects[cls].y += rectList[i].y;
  32 + rrects[cls].width += rectList[i].width;
  33 + rrects[cls].height += rectList[i].height;
  34 + rweights[cls]++;
  35 + }
  36 + if( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
  37 + {
  38 + for( i = 0; i < nlabels; i++ )
  39 + {
  40 + int cls = labels[i];
  41 + if( (*weights)[i] > rejectLevels[cls] )
  42 + {
  43 + rejectLevels[cls] = (*weights)[i];
  44 + rejectWeights[cls] = (*levelWeights)[i];
  45 + }
  46 + else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
  47 + rejectWeights[cls] = (*levelWeights)[i];
  48 + }
  49 + }
  50 +
  51 + for( i = 0; i < nclasses; i++ )
  52 + {
  53 + Rect r = rrects[i];
  54 + float s = 1.f/rweights[i];
  55 + rrects[i] = Rect(saturate_cast<int>(r.x*s),
  56 + saturate_cast<int>(r.y*s),
  57 + saturate_cast<int>(r.width*s),
  58 + saturate_cast<int>(r.height*s));
  59 + }
  60 +
  61 + rectList.clear();
  62 + if( weights )
  63 + weights->clear();
  64 + if( levelWeights )
  65 + levelWeights->clear();
  66 +
  67 + for( i = 0; i < nclasses; i++ )
  68 + {
  69 + Rect r1 = rrects[i];
  70 + int n1 = levelWeights ? rejectLevels[i] : rweights[i];
  71 +
  72 + double w1 = rejectWeights[i];
  73 + if( n1 <= groupThreshold )
  74 + continue;
  75 + // filter out small face rectangles inside large rectangles
  76 + for( j = 0; j < nclasses; j++ )
  77 + {
  78 + int n2 = rweights[j];
  79 +
  80 + if( j == i || n2 <= groupThreshold )
  81 + continue;
  82 + Rect r2 = rrects[j];
  83 +
  84 + int dx = saturate_cast<int>( r2.width * eps );
  85 + int dy = saturate_cast<int>( r2.height * eps );
  86 +
  87 + if( i != j &&
  88 + r1.x >= r2.x - dx &&
  89 + r1.y >= r2.y - dy &&
  90 + r1.x + r1.width <= r2.x + r2.width + dx &&
  91 + r1.y + r1.height <= r2.y + r2.height + dy &&
  92 + (n2 > std::max(3, n1) || n1 < 3) )
  93 + break;
  94 + }
  95 +
  96 + if( j == nclasses )
  97 + {
  98 + rectList.push_back(r1);
  99 + if( weights )
  100 + weights->push_back(n1);
  101 + if( levelWeights )
  102 + levelWeights->push_back(w1);
  103 + }
  104 + }
  105 +}
  106 +
  107 +void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps)
  108 +{
  109 + groupRectangles(rectList, groupThreshold, eps, 0, 0);
  110 +}
  111 +
  112 +void br::groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps)
  113 +{
  114 + groupRectangles(rectList, groupThreshold, eps, &weights, 0);
  115 +}
  116 +// Used by the cascade detection algorithm when computing ROC curves
  117 +void br::groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps)
  118 +{
  119 + groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
  120 +}
  121 +
  122 +bool _FeatureEvaluator::Feature::read(const FileNode& node )
  123 +{
  124 + FileNode rnode = node[CC_RECT];
  125 + FileNodeIterator it = rnode.begin();
  126 + it >> rect.x >> rect.y >> rect.width >> rect.height;
  127 + return true;
  128 +}
  129 +
  130 +bool _FeatureEvaluator::read( const FileNode& node )
  131 +{
  132 + features->resize(node.size());
  133 + featuresPtr = &(*features)[0];
  134 + FileNodeIterator it = node.begin(), it_end = node.end();
  135 + for(int i = 0; it != it_end; ++it, i++)
  136 + {
  137 + if(!featuresPtr[i].read(*it))
  138 + return false;
  139 + }
  140 + return true;
  141 +}
  142 +
  143 +bool _FeatureEvaluator::setImage( const Mat& image, Size _origWinSize )
  144 +{
  145 + int rn = image.rows+1, cn = image.cols+1;
  146 + origWinSize = _origWinSize;
  147 +
  148 + if( image.cols < origWinSize.width || image.rows < origWinSize.height )
  149 + return false;
  150 +
  151 + if( sum0.rows < rn || sum0.cols < cn )
  152 + sum0.create(rn, cn, CV_32S);
  153 + sum = Mat(rn, cn, CV_32S, sum0.data);
  154 + integral(image, sum);
  155 +
  156 + size_t fi, nfeatures = features->size();
  157 +
  158 + for( fi = 0; fi < nfeatures; fi++ )
  159 + featuresPtr[fi].updatePtrs( sum );
  160 + return true;
  161 +}
  162 +
  163 +bool _FeatureEvaluator::setWindow( Point pt )
  164 +{
  165 + if( pt.x < 0 || pt.y < 0 ||
  166 + pt.x + origWinSize.width >= sum.cols ||
  167 + pt.y + origWinSize.height >= sum.rows )
  168 + return false;
  169 + offset = pt.y * ((int)sum.step/sizeof(int)) + pt.x;
  170 + return true;
  171 +}
  172 +
  173 +// --------------------------------- Cascade Classifier ----------------------------------
  174 +
  175 +bool _CascadeClassifier::load(const string& filename)
  176 +{
  177 + data = Data();
  178 + featureEvaluator.release();
  179 +
  180 + FileStorage fs(filename, FileStorage::READ);
  181 + if( !fs.isOpened() )
  182 + return false;
  183 +
  184 + if( read(fs.getFirstTopLevelNode()) )
  185 + return true;
  186 +
  187 + return false;
  188 +}
  189 +
  190 +bool _CascadeClassifier::read(const FileNode& root)
  191 +{
  192 + if( !data.read(root) )
  193 + return false;
  194 +
  195 + // load features
  196 + featureEvaluator = Ptr<_FeatureEvaluator>(new _FeatureEvaluator());
  197 + FileNode fn = root[CC_FEATURES];
  198 + if( fn.empty() )
  199 + return false;
  200 +
  201 + return featureEvaluator->read(fn);
  202 +}
  203 +
  204 +int _CascadeClassifier::runAt(Point pt, double& weight)
  205 +{
  206 + if( !featureEvaluator->setWindow(pt) )
  207 + return -1;
  208 +
  209 + if( data.isStumpBased )
  210 + return predictCategoricalStump<_FeatureEvaluator>( *this, featureEvaluator, weight );
  211 + return predictCategorical<_FeatureEvaluator>( *this, featureEvaluator, weight );
  212 +}
  213 +
  214 +void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
  215 + vector<int>& rejectLevels,
  216 + vector<double>& levelWeights,
  217 + double scaleFactor, int minNeighbors,
  218 + int flags, Size minObjectSize, Size maxObjectSize,
  219 + bool outputRejectLevels )
  220 +{
  221 + const double GROUP_EPS = 0.2;
  222 +
  223 + CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
  224 +
  225 + if (data.stages.empty())
  226 + return;
  227 +
  228 + if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
  229 + maxObjectSize = image.size();
  230 +
  231 + Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
  232 +
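     +// Note: the detection window keeps its training size; the image is shrunk
     +// by `factor` instead, which is equivalent to scanning the original image
     +// with a window of windowSize at a stride of yStep*factor pixels.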
  233 + for (double factor = 1; ; factor *= scaleFactor) {
  234 + Size originalWindowSize = data.origWinSize;
  235 +
  236 + Size windowSize(cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
  237 + Size scaledImageSize(cvRound(image.cols/factor ), cvRound(image.rows/factor));
  238 + Size processingRectSize(scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height);
  239 +
  240 + if (processingRectSize.width <= 0 || processingRectSize.height <= 0)
  241 + break;
  242 + if (windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height)
  243 + break;
  244 + if (windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height)
  245 + continue;
  246 +
  247 + Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data);
  248 + resize(image, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR);
  249 + if (!featureEvaluator->setImage(scaledImage, originalWindowSize))
  250 + qFatal("Couldn't set the image");
  251 +
  252 + int yStep = factor > 2. ? 1 : 2; // scan every pixel once the image is heavily downscaled, else every other one
  253 + for (int y = 0; y < processingRectSize.height; y += yStep) {
  254 + for (int x = 0; x < processingRectSize.width; x += yStep) {
  255 + double gypWeight;
  256 + int result = runAt(Point(x, y), gypWeight);
  257 +
  258 + if (outputRejectLevels) {
  259 + if (result == 1)
  260 + result = -(int)data.stages.size();
  261 + if (data.stages.size() + result < 4) { // keep windows that reached the last few stages (or passed them all)
  262 + objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));
  263 + rejectLevels.push_back(-result);
  264 + levelWeights.push_back(gypWeight);
  265 + }
  266 + }
  267 + else if (result > 0) {
  268 + objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));
  269 + }
  270 + if (result == 0) // rejected by the very first stage; skip the next column too
  271 + x += yStep;
  272 + }
  273 + }
  274 + }
  275 +
  276 + if (outputRejectLevels)
  277 + groupRectangles(objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS);
  278 + else
  279 + groupRectangles(objects, minNeighbors, GROUP_EPS);
  280 +}
  281 +
  282 +void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
  283 + double scaleFactor, int minNeighbors,
  284 + int flags, Size minObjectSize, Size maxObjectSize)
  285 +{
  286 + vector<int> fakeLevels;
  287 + vector<double> fakeWeights;
  288 + detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,
  289 + minNeighbors, flags, minObjectSize, maxObjectSize, false );
  290 +}
  291 +
  292 +bool _CascadeClassifier::Data::read(const FileNode &root)
  293 +{
  294 + static const float THRESHOLD_EPS = 1e-5f;
  295 +
  296 + // load stage params
  297 + string stageTypeStr = (string)root[CC_STAGE_TYPE];
  298 + if( stageTypeStr == CC_BOOST )
  299 + stageType = BOOST;
  300 + else
  301 + return false;
  302 +
  303 + featureType = _FeatureEvaluator::LBP;
  304 +
  305 + origWinSize.width = (int)root[CC_WIDTH];
  306 + origWinSize.height = (int)root[CC_HEIGHT];
  307 + CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );
  308 +
  309 + isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1;
  310 +
  311 + // load feature params
  312 + FileNode fn = root[CC_FEATURE_PARAMS];
  313 + if( fn.empty() )
  314 + return false;
  315 +
  316 + ncategories = fn[CC_MAX_CAT_COUNT];
  317 + int subsetSize = (ncategories + 31)/32,
  318 + nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );
  319 +
  320 + // load stages
  321 + fn = root[CC_STAGES];
  322 + if( fn.empty() )
  323 + return false;
  324 +
  325 + stages.reserve(fn.size());
  326 + classifiers.clear();
  327 + nodes.clear();
  328 +
  329 + FileNodeIterator it = fn.begin(), it_end = fn.end();
  330 +
  331 + for( int si = 0; it != it_end; si++, ++it )
  332 + {
  333 + FileNode fns = *it;
  334 + Stage stage;
  335 + stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
  336 + fns = fns[CC_WEAK_CLASSIFIERS];
  337 + if(fns.empty())
  338 + return false;
  339 + stage.ntrees = (int)fns.size();
  340 + stage.first = (int)classifiers.size();
  341 + stages.push_back(stage);
  342 + classifiers.reserve(stages[si].first + stages[si].ntrees);
  343 +
  344 + FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
  345 + for( ; it1 != it1_end; ++it1 ) // weak trees
  346 + {
  347 + FileNode fnw = *it1;
  348 + FileNode internalNodes = fnw[CC_INTERNAL_NODES];
  349 + FileNode leafValues = fnw[CC_LEAF_VALUES];
  350 + if( internalNodes.empty() || leafValues.empty() )
  351 + return false;
  352 +
  353 + DTree tree;
  354 + tree.nodeCount = (int)internalNodes.size()/nodeStep;
  355 + classifiers.push_back(tree);
  356 +
  357 + nodes.reserve(nodes.size() + tree.nodeCount);
  358 + leaves.reserve(leaves.size() + leafValues.size());
  359 + if( subsetSize > 0 )
  360 + subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);
  361 +
  362 + FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();
  363 +
  364 + for( ; internalNodesIter != internalNodesEnd; ) // nodes
  365 + {
  366 + DTreeNode node;
  367 + node.left = (int)*internalNodesIter; ++internalNodesIter;
  368 + node.right = (int)*internalNodesIter; ++internalNodesIter;
  369 + node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
  370 + if( subsetSize > 0 )
  371 + {
  372 + for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
  373 + subsets.push_back((int)*internalNodesIter);
  374 + node.threshold = 0.f;
  375 + }
  376 + else
  377 + {
  378 + node.threshold = (float)*internalNodesIter; ++internalNodesIter;
  379 + }
  380 + nodes.push_back(node);
  381 + }
  382 +
  383 + internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end(); // reuse the node iterators to walk the leaf values
  384 +
  385 + for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
  386 + leaves.push_back((float)*internalNodesIter);
  387 + }
  388 + }
  389 + return true;
  390 +}
... ...
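
A rough usage sketch of the grouping entry points above (the candidate boxes
here are made up; in practice they come from the sliding-window scan in
detectMultiScale):

    std::vector<cv::Rect> candidates;   // raw, overlapping detector hits
    std::vector<int> neighborCounts;    // receives the cluster sizes
    br::groupRectangles(candidates, neighborCounts, 3 /*groupThreshold*/, 0.2 /*eps*/);
    // Boxes judged similar by SimilarRects (declared in cascade.h) are averaged
    // into one rectangle per cluster; clusters with 3 or fewer members are
    // dropped, and neighborCounts[i] records how many raw boxes backed
    // candidates[i].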
openbr/core/cascade.h 0 → 100644
  1 +#ifndef CASCADE_H
  2 +#define CASCADE_H
  3 +
  4 +#include <openbr/openbr_plugin.h>
  5 +#include <opencv2/imgproc/imgproc.hpp>
  6 +
  7 +#define CC_CASCADE_PARAMS "cascadeParams"
  8 +#define CC_STAGE_TYPE "stageType"
  9 +#define CC_FEATURE_TYPE "featureType"
  10 +#define CC_HEIGHT "height"
  11 +#define CC_WIDTH "width"
  12 +
  13 +#define CC_STAGE_NUM "stageNum"
  14 +#define CC_STAGES "stages"
  15 +#define CC_STAGE_PARAMS "stageParams"
  16 +
  17 +#define CC_BOOST "BOOST"
  18 +#define CC_MAX_DEPTH "maxDepth"
  19 +#define CC_WEAK_COUNT "maxWeakCount"
  20 +#define CC_STAGE_THRESHOLD "stageThreshold"
  21 +#define CC_WEAK_CLASSIFIERS "weakClassifiers"
  22 +#define CC_INTERNAL_NODES "internalNodes"
  23 +#define CC_LEAF_VALUES "leafValues"
  24 +
  25 +#define CC_FEATURES "features"
  26 +#define CC_FEATURE_PARAMS "featureParams"
  27 +#define CC_MAX_CAT_COUNT "maxCatCount"
  28 +
  29 +#define CC_HAAR "HAAR"
  30 +#define CC_RECTS "rects"
  31 +#define CC_TILTED "tilted"
  32 +
  33 +#define CC_LBP "LBP"
  34 +#define CC_RECT "rect"
  35 +
  36 +#define CC_HOG "HOG"
  37 +#define CC_HOGMulti "HOGMulti"
  38 +
  39 +#define CC_NPD "NPD"
  40 +#define CC_POINTS "points"
  41 +
  42 +#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \
  43 + /* (x, y) */ \
  44 + (p0) = sum + (rect).x + (step) * (rect).y, \
  45 + /* (x + w, y) */ \
  46 + (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \
  47 + /* (x, y + h) */ \
  48 + (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \
  49 + /* (x + w, y + h) */ \
  50 + (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)
  51 +
  52 +#define CALC_SUM_(p0, p1, p2, p3, offset) \
  53 + ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])
  54 +
  55 +#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
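     +// Illustrative note: for a rectangle r over an integral image `sum` with
     +// row stride `step`, CV_SUM_PTRS caches the four corner addresses once, and
     +// CALC_SUM_ then evaluates the area sum in O(1) by inclusion-exclusion:
     +//   area(r) = sum[y][x] - sum[y][x+w] - sum[y+h][x] + sum[y+h][x+w]
     +// i.e. (p0 - p1 - p2 + p3) at the given offset.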
  56 +
  57 +
  58 +using namespace std;
  59 +using namespace cv;
  60 +
  61 +namespace br
  62 +{
  63 +
  64 +// Class for grouping object candidates detected by a cascade classifier, HOG, etc.
  65 +// An instance of this class is passed to cv::partition (see cxoperations.hpp).
  66 +class SimilarRects
  67 +{
  68 +public:
  69 + SimilarRects(double _eps) : eps(_eps) {}
  70 + inline bool operator()(const Rect& r1, const Rect& r2) const
  71 + {
  72 + double delta = eps*(std::min(r1.width, r2.width) + std::min(r1.height, r2.height))*0.5;
  73 + return std::abs(r1.x - r2.x) <= delta &&
  74 + std::abs(r1.y - r2.y) <= delta &&
  75 + std::abs(r1.x + r1.width - r2.x - r2.width) <= delta &&
  76 + std::abs(r1.y + r1.height - r2.y - r2.height) <= delta;
  77 + }
  78 + double eps;
  79 +};
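     +// Illustrative example: with eps = 0.2, r1 = {100,100,50,50} and
     +// r2 = {105,103,52,49} give delta = 0.2*(50 + 49)*0.5 = 9.9; the four edge
     +// offsets (5, 3, 7, 2) all fall within delta, so r1 and r2 share a class.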
  80 +
  81 +void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps=0.2);
  82 +void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps=0.2);
  83 +void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights );
  84 +void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps=0.2);
  85 +
  86 +class _FeatureEvaluator
  87 +{
  88 +public:
  89 + enum { LBP = 0 };
  90 +
  91 + _FeatureEvaluator() : features(new vector<Feature>()) {}
  92 + virtual ~_FeatureEvaluator() {}
  93 +
  94 + virtual bool read( const FileNode& node );
  95 +
  96 + virtual bool setImage(const Mat& image, Size _origWinSize);
  97 + virtual bool setWindow(Point pt);
  98 +
  99 + int operator()(int featureIdx) const { return featuresPtr[featureIdx].calc(offset); }
  100 + virtual int calcCat(int featureIdx) const { return (*this)(featureIdx); }
  101 +
  102 +protected:
  103 + struct Feature
  104 + {
  105 + Feature();
  106 + Feature( int x, int y, int _block_w, int _block_h ) :
  107 + rect(x, y, _block_w, _block_h) {}
  108 +
  109 + int calc( int offset ) const;
  110 + void updatePtrs( const Mat& sum );
  111 + bool read(const FileNode& node );
  112 +
  113 + Rect rect; // width and height of the block
  114 + const int* p[16]; // cached pointers into the integral image for fast lookup
  115 + };
  116 +
  117 + Size origWinSize;
  118 + Ptr<vector<Feature> > features;
  119 + Feature* featuresPtr; // optimization
  120 + Mat sum0, sum, window;
  121 + Rect normrect;
  122 +
  123 + int offset;
  124 +};
  125 +
  126 +inline _FeatureEvaluator::Feature::Feature()
  127 +{
  128 + rect = Rect();
  129 + for( int i = 0; i < 16; i++ )
  130 + p[i] = 0;
  131 +}
  132 +
  133 +inline int _FeatureEvaluator::Feature::calc( int _offset ) const
  134 +{
  135 + int cval = CALC_SUM_( p[5], p[6], p[9], p[10], _offset );
  136 +
  137 + return (CALC_SUM_( p[0], p[1], p[4], p[5], _offset ) >= cval ? 128 : 0) | // 0
  138 + (CALC_SUM_( p[1], p[2], p[5], p[6], _offset ) >= cval ? 64 : 0) | // 1
  139 + (CALC_SUM_( p[2], p[3], p[6], p[7], _offset ) >= cval ? 32 : 0) | // 2
  140 + (CALC_SUM_( p[6], p[7], p[10], p[11], _offset ) >= cval ? 16 : 0) | // 5
  141 + (CALC_SUM_( p[10], p[11], p[14], p[15], _offset ) >= cval ? 8 : 0)| // 8
  142 + (CALC_SUM_( p[9], p[10], p[13], p[14], _offset ) >= cval ? 4 : 0)| // 7
  143 + (CALC_SUM_( p[8], p[9], p[12], p[13], _offset ) >= cval ? 2 : 0)| // 6
  144 + (CALC_SUM_( p[4], p[5], p[8], p[9], _offset ) >= cval ? 1 : 0);
  145 +}
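     +// Illustrative note: p[0..15] are the 16 grid points of a 3x3 block
     +// neighborhood (a 4x4 point lattice); the center block is (p5,p6,p9,p10).
     +// Numbering the blocks 0..8 row-major with 4 as the center, the eight
     +// neighbors are visited clockwise (0,1,2,5,8,7,6,3), each contributing one
     +// bit of the 8-bit LBP code from bit 7 down to bit 0.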
  146 +
  147 +inline void _FeatureEvaluator::Feature::updatePtrs( const Mat& _sum )
  148 +{
  149 + const int* ptr = (const int*)_sum.data;
  150 + size_t step = _sum.step/sizeof(ptr[0]);
  151 + Rect tr = rect;
  152 + CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step );
  153 + tr.x += 2*rect.width;
  154 + CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step );
  155 + tr.y += 2*rect.height;
  156 + CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step );
  157 + tr.x -= 2*rect.width;
  158 + CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
  159 +}
  160 +
  161 +enum
  162 +{
  163 + CASCADE_DO_CANNY_PRUNING=1,
  164 + CASCADE_SCALE_IMAGE=2,
  165 + CASCADE_FIND_BIGGEST_OBJECT=4,
  166 + CASCADE_DO_ROUGH_SEARCH=8
  167 +};
  168 +
  169 +class _CascadeClassifier
  170 +{
  171 +public:
  172 + _CascadeClassifier() {}
  173 + _CascadeClassifier( const string& filename ) { load(filename); }
  174 + ~_CascadeClassifier() {}
  175 +
  176 + bool load( const string& filename );
  177 + bool read( const FileNode& node );
  178 + void detectMultiScale( const Mat& image,
  179 + vector<Rect>& objects,
  180 + double scaleFactor=1.1,
  181 + int minNeighbors=3, int flags=0,
  182 + Size minSize=Size(),
  183 + Size maxSize=Size() );
  184 +
  185 + void detectMultiScale( const Mat& image,
  186 + vector<Rect>& objects,
  187 + vector<int>& rejectLevels,
  188 + vector<double>& levelWeights,
  189 + double scaleFactor=1.1,
  190 + int minNeighbors=3, int flags=0,
  191 + Size minSize=Size(),
  192 + Size maxSize=Size(),
  193 + bool outputRejectLevels=false );
  194 +
  195 + enum { BOOST = 0 };
  196 + enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2,
  197 + FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
  198 +
  199 + template<class FEval>
  200 + friend int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
  201 +
  202 + template<class FEval>
  203 + friend int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
  204 +
  205 + template<class FEval>
  206 + friend int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
  207 +
  208 + template<class FEval>
  209 + friend int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight);
  210 +
  211 + virtual int runAt(Point pt, double& weight );
  212 +
  213 + class Data
  214 + {
  215 + public:
  216 + struct DTreeNode
  217 + {
  218 + int featureIdx;
  219 + float threshold; // for ordered features only
  220 + int left;
  221 + int right;
  222 + };
  223 +
  224 + struct DTree
  225 + {
  226 + int nodeCount;
  227 + };
  228 +
  229 + struct Stage
  230 + {
  231 + int first;
  232 + int ntrees;
  233 + float threshold;
  234 + };
  235 +
  236 + bool read(const FileNode &node);
  237 +
  238 + bool isStumpBased;
  239 +
  240 + int stageType;
  241 + int featureType;
  242 + int ncategories;
  243 + Size origWinSize;
  244 +
  245 + vector<Stage> stages;
  246 + vector<DTree> classifiers;
  247 + vector<DTreeNode> nodes;
  248 + vector<float> leaves;
  249 + vector<int> subsets;
  250 + };
  251 +
  252 + Data data;
  253 + Ptr<_FeatureEvaluator> featureEvaluator;
  254 +};
  255 +
  256 +template<class FEval>
  257 +inline int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
  258 +{
  259 + int nstages = (int)cascade.data.stages.size();
  260 + int nodeOfs = 0, leafOfs = 0;
  261 + FEval& featureEvaluator = (FEval&)*_featureEvaluator;
  262 + float* cascadeLeaves = &cascade.data.leaves[0];
  263 + _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
  264 + _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
  265 + _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
  266 +
  267 + for( int si = 0; si < nstages; si++ )
  268 + {
  269 + _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
  270 + int wi, ntrees = stage.ntrees;
  271 + sum = 0;
  272 +
  273 + for( wi = 0; wi < ntrees; wi++ )
  274 + {
  275 + _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
  276 + int idx = 0, root = nodeOfs;
  277 +
  278 + do
  279 + {
  280 + _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
  281 + double val = featureEvaluator(node.featureIdx);
  282 + idx = val < node.threshold ? node.left : node.right;
  283 + }
  284 + while( idx > 0 );
  285 + sum += cascadeLeaves[leafOfs - idx];
  286 + nodeOfs += weak.nodeCount;
  287 + leafOfs += weak.nodeCount + 1;
  288 + }
  289 + if( sum < stage.threshold )
  290 + return -si;
  291 + }
  292 + return 1;
  293 +}
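     +// Note on the convention shared by all four predict* helpers: a return of 1
     +// means the window passed every stage, while -si means it was rejected at
     +// stage si (with that stage's score left in `sum`). detectMultiScale in
     +// cascade.cpp builds its rejectLevels output from this convention.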
  294 +
  295 +template<class FEval>
  296 +inline int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
  297 +{
  298 + int nstages = (int)cascade.data.stages.size();
  299 + int nodeOfs = 0, leafOfs = 0;
  300 + FEval& featureEvaluator = (FEval&)*_featureEvaluator;
  301 + size_t subsetSize = (cascade.data.ncategories + 31)/32;
  302 + int* cascadeSubsets = &cascade.data.subsets[0];
  303 + float* cascadeLeaves = &cascade.data.leaves[0];
  304 + _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
  305 + _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
  306 + _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
  307 +
  308 + for(int si = 0; si < nstages; si++ )
  309 + {
  310 + _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
  311 + int wi, ntrees = stage.ntrees;
  312 + sum = 0;
  313 +
  314 + for( wi = 0; wi < ntrees; wi++ )
  315 + {
  316 + _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
  317 + int idx = 0, root = nodeOfs;
  318 + do
  319 + {
  320 + _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
  321 + int c = featureEvaluator(node.featureIdx);
  322 + const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
  323 + idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
  324 + }
  325 + while( idx > 0 );
  326 + sum += cascadeLeaves[leafOfs - idx];
  327 + nodeOfs += weak.nodeCount;
  328 + leafOfs += weak.nodeCount + 1;
  329 + }
  330 + if( sum < stage.threshold )
  331 + return -si;
  332 + }
  333 + return 1;
  334 +}
  335 +
  336 +template<class FEval>
  337 +inline int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
  338 +{
  339 + int nodeOfs = 0, leafOfs = 0;
  340 + FEval& featureEvaluator = (FEval&)*_featureEvaluator;
  341 + float* cascadeLeaves = &cascade.data.leaves[0];
  342 + _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
  343 + _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
  344 +
  345 + int nstages = (int)cascade.data.stages.size();
  346 + for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
  347 + {
  348 + _CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
  349 + sum = 0.0;
  350 +
  351 + int ntrees = stage.ntrees;
  352 + for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
  353 + {
  354 + _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
  355 + double value = featureEvaluator(node.featureIdx);
  356 + sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
  357 + }
  358 +
  359 + if( sum < stage.threshold )
  360 + return -stageIdx;
  361 + }
  362 +
  363 + return 1;
  364 +}
  365 +
  366 +template<class FEval>
  367 +inline int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum )
  368 +{
  369 + int nstages = (int)cascade.data.stages.size();
  370 + int nodeOfs = 0, leafOfs = 0;
  371 + FEval& featureEvaluator = (FEval&)*_featureEvaluator;
  372 + size_t subsetSize = (cascade.data.ncategories + 31)/32;
  373 + int* cascadeSubsets = &cascade.data.subsets[0];
  374 + float* cascadeLeaves = &cascade.data.leaves[0];
  375 + _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
  376 + _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
  377 +
  378 + for( int si = 0; si < nstages; si++ )
  379 + {
  380 + _CascadeClassifier::Data::Stage& stage = cascadeStages[si];
  381 + int wi, ntrees = stage.ntrees;
  382 +
  383 + sum = 0;
  384 +
  385 + for( wi = 0; wi < ntrees; wi++ )
  386 + {
  387 + _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
  388 + int c = featureEvaluator(node.featureIdx);
  389 + const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
  390 +
  391 + sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
  392 +
  393 + nodeOfs++;
  394 + leafOfs += 2;
  395 + }
  396 +
  397 + if( sum < stage.threshold )
  398 + return -si;
  399 + }
  400 +
  401 + return 1;
  402 +}
  403 +
  404 +} // namespace br
  405 +
  406 +#endif // CASCADE_H
... ...
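
A minimal end-to-end sketch of the detection API added here (the model and
image file names are hypothetical; any LBP cascade in OpenCV's XML format and
any 8-bit image would do):

    #include <opencv2/highgui/highgui.hpp>
    #include <openbr/core/cascade.h>

    int main()
    {
        br::_CascadeClassifier cascade("FrontalFace.xml"); // hypothetical model
        cv::Mat gray = cv::imread("face.jpg", 0);          // 8-bit grayscale

        std::vector<cv::Rect> faces;
        cascade.detectMultiScale(gray, faces, 1.1 /*scaleFactor*/,
                                 5 /*minNeighbors*/, 0 /*flags*/,
                                 cv::Size(36, 36) /*minSize*/);

        // faces holds one grouped rectangle per detection; the longer overload
        // with outputRejectLevels=true also returns per-window reject levels
        // and weights for ROC-style evaluation.
        return 0;
    }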
openbr/plugins/metadata/cascade.cpp
... ... @@ -15,10 +15,10 @@
15 15 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
16 16 #include <QProcess>
17 17 #include <QTemporaryFile>
18   -#include <opencv2/objdetect/objdetect.hpp>
19 18 #include <fstream>
20 19  
21 20 #include <openbr/plugins/openbr_internal.h>
  21 +#include <openbr/core/cascade.h>
22 22 #include <openbr/core/opencvutils.h>
23 23 #include <openbr/core/resource.h>
24 24 #include <openbr/core/qtutils.h>
... ... @@ -28,7 +28,7 @@ using namespace cv;
28 28 namespace br
29 29 {
30 30  
31   -class CascadeResourceMaker : public ResourceMaker<CascadeClassifier>
  31 +class CascadeResourceMaker : public ResourceMaker<_CascadeClassifier>
32 32 {
33 33 QString file;
34 34  
... ... @@ -49,9 +49,9 @@ public:
49 49 }
50 50  
51 51 private:
52   - CascadeClassifier *make() const
  52 + _CascadeClassifier *make() const
53 53 {
54   - CascadeClassifier *cascade = new CascadeClassifier();
  54 + _CascadeClassifier *cascade = new _CascadeClassifier();
55 55 if (!cascade->load(file.toStdString()))
56 56 qFatal("Failed to load: %s", qPrintable(file));
57 57 return cascade;
... ... @@ -77,7 +77,7 @@ class CascadeTransform : public UntrainableMetaTransform
77 77 BR_PROPERTY(int, minNeighbors, 5)
78 78 BR_PROPERTY(bool, ROCMode, false)
79 79  
80   - Resource<CascadeClassifier> cascadeResource;
  80 + Resource<_CascadeClassifier> cascadeResource;
81 81  
82 82 void init()
83 83 {
... ... @@ -95,7 +95,7 @@ class CascadeTransform : public UntrainableMetaTransform
95 95  
96 96 void project(const TemplateList &src, TemplateList &dst) const
97 97 {
98   - CascadeClassifier *cascade = cascadeResource.acquire();
  98 + _CascadeClassifier *cascade = cascadeResource.acquire();
99 99 foreach (const Template &t, src) {
100 100 const bool enrollAll = t.file.getBool("enrollAll");
101 101  
... ...