Commit d4a5b59c31c2b0d89dce8446b4a0eb5e0964c5e4 (1 parent: 68cbcbe1)
Frontend uses representations

Showing 9 changed files with 149 additions and 455 deletions
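In short, the cascade frontend stops using its private integral-image _FeatureEvaluator and computes features through OpenBR's Representation abstraction (an MBLBP(24,24) representation by default). The sketch below is illustrative only: it strings together the calls that appear in the openbr/core/cascade.cpp hunk further down (preprocess, postWindowSize, predict) and assumes the representation member stays public as declared in openbr/core/cascade.h; the loop bounds and step are simplified.

// Illustrative sketch, not part of the commit: scanning one pre-scaled image the way the
// new detectMultiScale loop does, via the Representation member instead of a feature evaluator.
#include <vector>
#include <opencv2/core/core.hpp>
#include "openbr/core/cascade.h"

static void scanScaledImage(br::_CascadeClassifier &cascade, const cv::Mat &scaledImage,
                            std::vector<cv::Rect> &detections)
{
    cv::Mat repImage;
    cascade.representation->preprocess(scaledImage, repImage);   // e.g. integral image for MB-LBP

    const cv::Size winSize = cascade.representation->postWindowSize();
    for (int y = 0; y + winSize.height <= repImage.rows; y += 2)
        for (int x = 0; x + winSize.width <= repImage.cols; x += 2) {
            cv::Mat window = repImage(cv::Rect(cv::Point(x, y), winSize)).clone();
            double stageSum;
            if (cascade.predict(window, stageSum) == 1)           // passed every stage
                detections.push_back(cv::Rect(x, y, winSize.width, winSize.height));
        }
}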
openbr/core/boost.cpp
| ... | ... | @@ -826,7 +826,80 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const |
| 826 | 826 | return node; |
| 827 | 827 | } |
| 828 | 828 | |
| 829 | -void CascadeBoostTree::write( FileStorage &fs, const Mat& featureMap ) | |
| 829 | +/* | |
| 830 | +static void writeRecursive(FileStorage &fs, CvDTreeNode *node, int maxCatCount) | |
| 831 | +{ | |
| 832 | + bool hasChildren = node->left ? true : false; | |
| 833 | + fs << "hasChildren" << hasChildren; | |
| 834 | + | |
| 835 | + if (!hasChildren) // Write the leaf value | |
| 836 | + fs << "value" << node->value; // value of the node. Only relevant for leaf nodes | |
| 837 | + else { // Write the splitting information and then the children | |
| 838 | + if (maxCatCount > 0) { | |
| 839 | + fs << "subset" << "[:"; | |
| 840 | + for (int i = 0; i < ((maxCatCount + 31) / 32); i++) | |
| 841 | + fs << node->split->subset[i]; // subset to split on (categorical features) | |
| 842 | + fs << "]"; | |
| 843 | + } else { | |
| 844 | + fs << "threshold" << node->split->ord.c; // threshold to split on (ordered features) | |
| 845 | + } | |
| 846 | + | |
| 847 | + fs << "feature_idx" << node->split->var_idx; // feature idx of node | |
| 848 | + | |
| 849 | + fs << "left" << "{"; writeRecursive(fs, node->left, maxCatCount); fs << "}"; // write left child | |
| 850 | + fs << "right" << "{"; writeRecursive(fs, node->right, maxCatCount); fs << "}"; // write right child | |
| 851 | + } | |
| 852 | +} | |
| 853 | + | |
| 854 | +void CascadeBoostTree::write(FileStorage &fs) | |
| 855 | +{ | |
| 856 | + fs << "{"; | |
| 857 | + writeRecursive(fs, root, ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount()); | |
| 858 | + fs << "}"; | |
| 859 | +} | |
| 860 | + | |
| 861 | +static void readRecursive(const FileNode &fn, CvDTreeNode *node, CvDTreeTrainData *data) | |
| 862 | +{ | |
| 863 | + bool hasChildren = (int)fn["hasChildren"]; | |
| 864 | + | |
| 865 | + if (!hasChildren) | |
| 866 | + node->value = (float)fn["value"]; | |
| 867 | + else { | |
| 868 | + int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount(); | |
| 869 | + if (maxCatCount > 0) { | |
| 870 | + node->split = data->new_split_cat(0, 0); | |
| 871 | + FileNode subset_node = fn["subset"]; FileNodeIterator subset_it = subset_node.begin(); | |
| 872 | + for (int i = 0; i < (maxCatCount + 31) / 32; i++, ++subset_it) | |
| 873 | + node->split->subset[i] = (int)*subset_it; | |
| 874 | + } else { | |
| 875 | + float threshold = (float)fn["threshold"]; | |
| 876 | + node->split = data->new_split_ord(0, threshold, 0, 0, 0); | |
| 877 | + } | |
| 878 | + | |
| 879 | + node->split->var_idx = (int)fn["feature_idx"]; | |
| 880 | + | |
| 881 | + CvDTreeNode *leftChild = data->new_node(node, 0, 0, 0); | |
| 882 | + node->left = leftChild; | |
| 883 | + readRecursive(fn["left"], leftChild, data); | |
| 884 | + | |
| 885 | + CvDTreeNode *rightChild = data->new_node(node, 0, 0, 0); | |
| 886 | + node->right = rightChild; | |
| 887 | + readRecursive(fn["right"], rightChild, data); | |
| 888 | + } | |
| 889 | +} | |
| 890 | + | |
| 891 | +void CascadeBoostTree::read(const FileNode &fn, CvBoost* _ensemble, CvDTreeTrainData* _data) | |
| 892 | +{ | |
| 893 | + clear(); | |
| 894 | + data = _data; | |
| 895 | + ensemble = _ensemble; | |
| 896 | + pruned_tree_idx = 0; | |
| 897 | + | |
| 898 | + root = data->new_node(0, 0, 0, 0); | |
| 899 | + readRecursive(fn, root, data); | |
| 900 | +}*/ | |
| 901 | + | |
| 902 | +void CascadeBoostTree::write(FileStorage &fs) | |
| 830 | 903 | { |
| 831 | 904 | int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount(); |
| 832 | 905 | int subsetN = (maxCatCount + 31)/32; |
| ... | ... | @@ -868,7 +941,7 @@ void CascadeBoostTree::write( FileStorage &fs, const Mat& featureMap ) |
| 868 | 941 | fs << internalNodeIdx++; |
| 869 | 942 | } |
| 870 | 943 | int fidx = tempNode->split->var_idx; |
| 871 | - fidx = featureMap.empty() ? fidx : featureMap.at<int>(0, fidx); | |
| 944 | + | |
| 872 | 945 | fs << fidx; |
| 873 | 946 | if ( !maxCatCount ) |
| 874 | 947 | fs << tempNode->split->ord.c; |
| ... | ... | @@ -1104,21 +1177,6 @@ void CascadeBoostTree::split_node_data( CvDTreeNode* node ) |
| 1104 | 1177 | data->free_node_data(node); |
| 1105 | 1178 | } |
| 1106 | 1179 | |
| 1107 | -static void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap) | |
| 1108 | -{ | |
| 1109 | - if ( node && node->split ) | |
| 1110 | - { | |
| 1111 | - featureMap.ptr<int>(0)[node->split->var_idx] = 1; | |
| 1112 | - auxMarkFeaturesInMap( node->left, featureMap ); | |
| 1113 | - auxMarkFeaturesInMap( node->right, featureMap ); | |
| 1114 | - } | |
| 1115 | -} | |
| 1116 | - | |
| 1117 | -void CascadeBoostTree::markFeaturesInMap( Mat& featureMap ) | |
| 1118 | -{ | |
| 1119 | - auxMarkFeaturesInMap( root, featureMap ); | |
| 1120 | -} | |
| 1121 | - | |
| 1122 | 1180 | //----------------------------------- CascadeBoost -------------------------------------- |
| 1123 | 1181 | |
| 1124 | 1182 | bool CascadeBoost::train( const FeatureEvaluator* _featureEvaluator, |
| ... | ... | @@ -1484,7 +1542,7 @@ bool CascadeBoost::isErrDesired() |
| 1484 | 1542 | return falseAlarm <= maxFalseAlarm; |
| 1485 | 1543 | } |
| 1486 | 1544 | |
| 1487 | -void CascadeBoost::write( FileStorage &fs, const Mat& featureMap ) const | |
| 1545 | +void CascadeBoost::write(FileStorage &fs) const | |
| 1488 | 1546 | { |
| 1489 | 1547 | // char cmnt[30]; |
| 1490 | 1548 | CascadeBoostTree* weakTree; |
| ... | ... | @@ -1496,17 +1554,8 @@ void CascadeBoost::write( FileStorage &fs, const Mat& featureMap ) const |
| 1496 | 1554 | /*sprintf( cmnt, "tree %i", wi ); |
| 1497 | 1555 | cvWriteComment( fs, cmnt, 0 );*/ |
| 1498 | 1556 | weakTree = *((CascadeBoostTree**) cvGetSeqElem( weak, wi )); |
| 1499 | - weakTree->write( fs, featureMap ); | |
| 1557 | + weakTree->write(fs); | |
| 1500 | 1558 | } |
| 1501 | 1559 | fs << "]"; |
| 1502 | 1560 | } |
| 1503 | 1561 | |
| 1504 | -void CascadeBoost::markUsedFeaturesInMap( Mat& featureMap ) | |
| 1505 | -{ | |
| 1506 | - for( int wi = 0; wi < weak->total; wi++ ) | |
| 1507 | - { | |
| 1508 | - CascadeBoostTree* weakTree = *((CascadeBoostTree**) cvGetSeqElem( weak, wi )); | |
| 1509 | - weakTree->markFeaturesInMap( featureMap ); | |
| 1510 | - } | |
| 1511 | -} | |
| 1512 | - |
openbr/core/boost.h
| ... | ... | @@ -113,8 +113,7 @@ class CascadeBoostTree : public CvBoostTree |
| 113 | 113 | { |
| 114 | 114 | public: |
| 115 | 115 | virtual CvDTreeNode* predict(int sampleIdx) const; |
| 116 | - void write(cv::FileStorage &fs, const cv::Mat& featureMap); | |
| 117 | - void markFeaturesInMap(cv::Mat& featureMap); | |
| 116 | + void write(cv::FileStorage &fs); | |
| 118 | 117 | |
| 119 | 118 | protected: |
| 120 | 119 | virtual void split_node_data(CvDTreeNode* n); |
| ... | ... | @@ -129,8 +128,7 @@ public: |
| 129 | 128 | virtual float predict( int sampleIdx, bool returnSum = false ) const; |
| 130 | 129 | |
| 131 | 130 | float getThreshold() const { return threshold; } |
| 132 | - void write(cv::FileStorage &fs, const cv::Mat& featureMap) const; | |
| 133 | - void markUsedFeaturesInMap(cv::Mat& featureMap); | |
| 131 | + void write(cv::FileStorage &fs) const; | |
| 134 | 132 | |
| 135 | 133 | protected: |
| 136 | 134 | virtual bool set_params(const CvBoostParams& _params); |
openbr/core/cascade.cpp
| ... | ... | @@ -113,109 +113,73 @@ void br::groupRectangles(vector<Rect>& rectList, vector<int>& weights, int group |
| 113 | 113 | { |
| 114 | 114 | groupRectangles(rectList, groupThreshold, eps, &weights, 0); |
| 115 | 115 | } |
| 116 | -//used for cascade detection algorithm for ROC-curve calculating | |
| 116 | + | |
| 117 | 117 | void br::groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps) |
| 118 | 118 | { |
| 119 | 119 | groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights); |
| 120 | 120 | } |
| 121 | 121 | |
| 122 | -bool _FeatureEvaluator::Feature::read(const FileNode& node ) | |
| 123 | -{ | |
| 124 | - FileNode rnode = node[CC_RECT]; | |
| 125 | - FileNodeIterator it = rnode.begin(); | |
| 126 | - it >> rect.x >> rect.y >> rect.width >> rect.height; | |
| 127 | - return true; | |
| 128 | -} | |
| 129 | - | |
| 130 | -bool _FeatureEvaluator::read( const FileNode& node ) | |
| 131 | -{ | |
| 132 | - features->resize(node.size()); | |
| 133 | - featuresPtr = &(*features)[0]; | |
| 134 | - FileNodeIterator it = node.begin(), it_end = node.end(); | |
| 135 | - for(int i = 0; it != it_end; ++it, i++) | |
| 136 | - { | |
| 137 | - if(!featuresPtr[i].read(*it)) | |
| 138 | - return false; | |
| 139 | - } | |
| 140 | - return true; | |
| 141 | -} | |
| 142 | - | |
| 143 | -bool _FeatureEvaluator::setImage( const Mat& image, Size _origWinSize ) | |
| 144 | -{ | |
| 145 | - int rn = image.rows+1, cn = image.cols+1; | |
| 146 | - origWinSize = _origWinSize; | |
| 147 | - | |
| 148 | - if( image.cols < origWinSize.width || image.rows < origWinSize.height ) | |
| 149 | - return false; | |
| 150 | - | |
| 151 | - if( sum0.rows < rn || sum0.cols < cn ) | |
| 152 | - sum0.create(rn, cn, CV_32S); | |
| 153 | - sum = Mat(rn, cn, CV_32S, sum0.data); | |
| 154 | - integral(image, sum); | |
| 155 | - | |
| 156 | - size_t fi, nfeatures = features->size(); | |
| 157 | - | |
| 158 | - for( fi = 0; fi < nfeatures; fi++ ) | |
| 159 | - featuresPtr[fi].updatePtrs( sum ); | |
| 160 | - return true; | |
| 161 | -} | |
| 162 | - | |
| 163 | -bool _FeatureEvaluator::setWindow( Point pt ) | |
| 164 | -{ | |
| 165 | - if( pt.x < 0 || pt.y < 0 || | |
| 166 | - pt.x + origWinSize.width >= sum.cols || | |
| 167 | - pt.y + origWinSize.height >= sum.rows ) | |
| 168 | - return false; | |
| 169 | - offset = pt.y * ((int)sum.step/sizeof(int)) + pt.x; | |
| 170 | - return true; | |
| 171 | -} | |
| 172 | - | |
| 173 | 122 | // --------------------------------- Cascade Classifier ---------------------------------- |
| 174 | 123 | |
| 175 | 124 | bool _CascadeClassifier::load(const string& filename) |
| 176 | 125 | { |
| 177 | 126 | data = Data(); |
| 178 | - featureEvaluator.release(); | |
| 179 | 127 | |
| 180 | 128 | FileStorage fs(filename, FileStorage::READ); |
| 181 | - if( !fs.isOpened() ) | |
| 182 | - return false; | |
| 183 | - | |
| 184 | - if( read(fs.getFirstTopLevelNode()) ) | |
| 185 | - return true; | |
| 186 | - | |
| 187 | - return false; | |
| 188 | -} | |
| 189 | - | |
| 190 | -bool _CascadeClassifier::read(const FileNode& root) | |
| 191 | -{ | |
| 192 | - if( !data.read(root) ) | |
| 193 | - return false; | |
| 194 | - | |
| 195 | - // load features | |
| 196 | - featureEvaluator = Ptr<_FeatureEvaluator>(new _FeatureEvaluator()); | |
| 197 | - FileNode fn = root[CC_FEATURES]; | |
| 198 | - if( fn.empty() ) | |
| 129 | + if (!fs.isOpened()) | |
| 199 | 130 | return false; |
| 200 | 131 | |
| 201 | - return featureEvaluator->read(fn); | |
| 132 | + return data.read(fs.getFirstTopLevelNode()); | |
| 202 | 133 | } |
| 203 | 134 | |
| 204 | -int _CascadeClassifier::runAt(Point pt, double& weight) | |
| 135 | +int _CascadeClassifier::predict(const Mat &image, double &sum) const | |
| 205 | 136 | { |
| 206 | - if( !featureEvaluator->setWindow(pt) ) | |
| 207 | - return -1; | |
| 137 | + int nstages = (int)data.stages.size(); | |
| 138 | + int nodeOfs = 0, leafOfs = 0; | |
| 139 | + | |
| 140 | + size_t subsetSize = (data.ncategories + 31)/32; | |
| 141 | + const int *cascadeSubsets = &data.subsets[0]; | |
| 142 | + | |
| 143 | + const float *cascadeLeaves = &data.leaves[0]; | |
| 144 | + const Data::DTreeNode *cascadeNodes = &data.nodes[0]; | |
| 145 | + const Data::DTree *cascadeWeaks = &data.classifiers[0]; | |
| 146 | + const Data::Stage *cascadeStages = &data.stages[0]; | |
| 147 | + | |
| 148 | + for (int stageIdx = 0; stageIdx < nstages; stageIdx++) { | |
| 149 | + const Data::Stage &stage = cascadeStages[stageIdx]; | |
| 150 | + sum = 0; | |
| 151 | + | |
| 152 | + for (int wi = 0; wi < stage.ntrees; wi++) { | |
| 153 | + const Data::DTree &weak = cascadeWeaks[stage.first + wi]; | |
| 154 | + int idx = 0, root = nodeOfs; | |
| 155 | + | |
| 156 | + do { | |
| 157 | + const Data::DTreeNode &node = cascadeNodes[root + idx]; | |
| 158 | + if (data.ncategories > 0) { | |
| 159 | + int c = (int)representation->evaluate(image, node.featureIdx); | |
| 160 | + const int* subset = &cascadeSubsets[(root + idx)*subsetSize]; | |
| 161 | + idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right; | |
| 162 | + } else { | |
| 163 | + double val = representation->evaluate(image, node.featureIdx); | |
| 164 | + idx = val < node.threshold ? node.left : node.right; | |
| 165 | + } | |
| 166 | + } while( idx > 0 ); | |
| 208 | 167 | |
| 209 | - if( data.isStumpBased ) | |
| 210 | - return predictCategoricalStump<_FeatureEvaluator>( *this, featureEvaluator, weight ); | |
| 211 | - return predictCategorical<_FeatureEvaluator>( *this, featureEvaluator, weight ); | |
| 168 | + sum += cascadeLeaves[leafOfs - idx]; | |
| 169 | + nodeOfs += weak.nodeCount; | |
| 170 | + leafOfs += weak.nodeCount + 1; | |
| 171 | + } | |
| 172 | + if( sum < stage.threshold ) | |
| 173 | + return -stageIdx; | |
| 174 | + } | |
| 175 | + return 1; | |
| 212 | 176 | } |
| 213 | 177 | |
| 214 | 178 | void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects, |
| 215 | 179 | vector<int>& rejectLevels, |
| 216 | 180 | vector<double>& levelWeights, |
| 217 | 181 | double scaleFactor, int minNeighbors, |
| 218 | - int flags, Size minObjectSize, Size maxObjectSize, | |
| 182 | + Size minObjectSize, Size maxObjectSize, | |
| 219 | 183 | bool outputRejectLevels ) |
| 220 | 184 | { |
| 221 | 185 | const double GROUP_EPS = 0.2; |
| ... | ... | @@ -246,14 +210,17 @@ void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objec |
| 246 | 210 | |
| 247 | 211 | Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data); |
| 248 | 212 | resize(image, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR); |
| 249 | - if (!featureEvaluator->setImage(scaledImage, originalWindowSize)) | |
| 250 | - qFatal("Couldn't set the image"); | |
| 213 | + | |
| 214 | + Mat repImage; | |
| 215 | + representation->preprocess(scaledImage, repImage); | |
| 251 | 216 | |
| 252 | 217 | int yStep = factor > 2. ? 1 : 2; |
| 253 | 218 | for (int y = 0; y < processingRectSize.height; y += yStep) { |
| 254 | 219 | for (int x = 0; x < processingRectSize.width; x += yStep) { |
| 220 | + Mat window = repImage(Rect(Point(x, y), representation->postWindowSize())).clone(); | |
| 221 | + | |
| 255 | 222 | double gypWeight; |
| 256 | - int result = runAt(Point(x, y), gypWeight); | |
| 223 | + int result = predict(window, gypWeight); | |
| 257 | 224 | |
| 258 | 225 | if (outputRejectLevels) { |
| 259 | 226 | if (result == 1) |
| ... | ... | @@ -279,29 +246,19 @@ void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objec |
| 279 | 246 | groupRectangles(objects, minNeighbors, GROUP_EPS); |
| 280 | 247 | } |
| 281 | 248 | |
| 282 | -void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects, | |
| 283 | - double scaleFactor, int minNeighbors, | |
| 284 | - int flags, Size minObjectSize, Size maxObjectSize) | |
| 249 | +void _CascadeClassifier::detectMultiScale(const Mat& image, vector<Rect>& objects, | |
| 250 | + double scaleFactor, int minNeighbors, Size minObjectSize, Size maxObjectSize) | |
| 285 | 251 | { |
| 286 | 252 | vector<int> fakeLevels; |
| 287 | 253 | vector<double> fakeWeights; |
| 288 | 254 | detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor, |
| 289 | - minNeighbors, flags, minObjectSize, maxObjectSize, false ); | |
| 255 | + minNeighbors, minObjectSize, maxObjectSize, false ); | |
| 290 | 256 | } |
| 291 | 257 | |
| 292 | 258 | bool _CascadeClassifier::Data::read(const FileNode &root) |
| 293 | 259 | { |
| 294 | 260 | static const float THRESHOLD_EPS = 1e-5f; |
| 295 | 261 | |
| 296 | - // load stage params | |
| 297 | - string stageTypeStr = (string)root[CC_STAGE_TYPE]; | |
| 298 | - if( stageTypeStr == CC_BOOST ) | |
| 299 | - stageType = BOOST; | |
| 300 | - else | |
| 301 | - return false; | |
| 302 | - | |
| 303 | - featureType = _FeatureEvaluator::LBP; | |
| 304 | - | |
| 305 | 262 | origWinSize.width = (int)root[CC_WIDTH]; |
| 306 | 263 | origWinSize.height = (int)root[CC_HEIGHT]; |
| 307 | 264 | CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 ); |
openbr/core/cascade.h
| ... | ... | @@ -26,33 +26,7 @@ |
| 26 | 26 | #define CC_FEATURE_PARAMS "featureParams" |
| 27 | 27 | #define CC_MAX_CAT_COUNT "maxCatCount" |
| 28 | 28 | |
| 29 | -#define CC_HAAR "HAAR" | |
| 30 | -#define CC_RECTS "rects" | |
| 31 | -#define CC_TILTED "tilted" | |
| 32 | - | |
| 33 | 29 | #define CC_LBP "LBP" |
| 34 | -#define CC_RECT "rect" | |
| 35 | - | |
| 36 | -#define CC_HOG "HOG" | |
| 37 | -#define CC_HOGMulti "HOGMulti" | |
| 38 | - | |
| 39 | -#define CC_NPD "NPD" | |
| 40 | -#define CC_POINTS "points" | |
| 41 | - | |
| 42 | -#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \ | |
| 43 | - /* (x, y) */ \ | |
| 44 | - (p0) = sum + (rect).x + (step) * (rect).y, \ | |
| 45 | - /* (x + w, y) */ \ | |
| 46 | - (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \ | |
| 47 | - /* (x, y + h) */ \ | |
| 48 | - (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \ | |
| 49 | - /* (x + w, y + h) */ \ | |
| 50 | - (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height) | |
| 51 | - | |
| 52 | -#define CALC_SUM_(p0, p1, p2, p3, offset) \ | |
| 53 | - ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset]) | |
| 54 | - | |
| 55 | -#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset) | |
| 56 | 30 | |
| 57 | 31 | |
| 58 | 32 | using namespace std; |
| ... | ... | @@ -83,132 +57,32 @@ void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThre |
| 83 | 57 | void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights ); |
| 84 | 58 | void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps=0.2); |
| 85 | 59 | |
| 86 | -class _FeatureEvaluator | |
| 87 | -{ | |
| 88 | -public: | |
| 89 | - enum { LBP = 0 }; | |
| 90 | - | |
| 91 | - _FeatureEvaluator() : features(new vector<Feature>()) {} | |
| 92 | - virtual ~_FeatureEvaluator() {} | |
| 93 | - | |
| 94 | - virtual bool read( const FileNode& node ); | |
| 95 | - | |
| 96 | - virtual bool setImage(const Mat& image, Size _origWinSize); | |
| 97 | - virtual bool setWindow(Point pt); | |
| 98 | - | |
| 99 | - int operator()(int featureIdx) const { return featuresPtr[featureIdx].calc(offset); } | |
| 100 | - virtual int calcCat(int featureIdx) const { return (*this)(featureIdx); } | |
| 101 | - | |
| 102 | -protected: | |
| 103 | - struct Feature | |
| 104 | - { | |
| 105 | - Feature(); | |
| 106 | - Feature( int x, int y, int _block_w, int _block_h ) : | |
| 107 | - rect(x, y, _block_w, _block_h) {} | |
| 108 | - | |
| 109 | - int calc( int offset ) const; | |
| 110 | - void updatePtrs( const Mat& sum ); | |
| 111 | - bool read(const FileNode& node ); | |
| 112 | - | |
| 113 | - Rect rect; // weight and height for block | |
| 114 | - const int* p[16]; // fast | |
| 115 | - }; | |
| 116 | - | |
| 117 | - Size origWinSize; | |
| 118 | - Ptr<vector<Feature> > features; | |
| 119 | - Feature* featuresPtr; // optimization | |
| 120 | - Mat sum0, sum, window; | |
| 121 | - Rect normrect; | |
| 122 | - | |
| 123 | - int offset; | |
| 124 | -}; | |
| 125 | - | |
| 126 | -inline _FeatureEvaluator::Feature::Feature() | |
| 127 | -{ | |
| 128 | - rect = Rect(); | |
| 129 | - for( int i = 0; i < 16; i++ ) | |
| 130 | - p[i] = 0; | |
| 131 | -} | |
| 132 | - | |
| 133 | -inline int _FeatureEvaluator::Feature::calc( int _offset ) const | |
| 134 | -{ | |
| 135 | - int cval = CALC_SUM_( p[5], p[6], p[9], p[10], _offset ); | |
| 136 | - | |
| 137 | - return (CALC_SUM_( p[0], p[1], p[4], p[5], _offset ) >= cval ? 128 : 0) | // 0 | |
| 138 | - (CALC_SUM_( p[1], p[2], p[5], p[6], _offset ) >= cval ? 64 : 0) | // 1 | |
| 139 | - (CALC_SUM_( p[2], p[3], p[6], p[7], _offset ) >= cval ? 32 : 0) | // 2 | |
| 140 | - (CALC_SUM_( p[6], p[7], p[10], p[11], _offset ) >= cval ? 16 : 0) | // 5 | |
| 141 | - (CALC_SUM_( p[10], p[11], p[14], p[15], _offset ) >= cval ? 8 : 0)| // 8 | |
| 142 | - (CALC_SUM_( p[9], p[10], p[13], p[14], _offset ) >= cval ? 4 : 0)| // 7 | |
| 143 | - (CALC_SUM_( p[8], p[9], p[12], p[13], _offset ) >= cval ? 2 : 0)| // 6 | |
| 144 | - (CALC_SUM_( p[4], p[5], p[8], p[9], _offset ) >= cval ? 1 : 0); | |
| 145 | -} | |
| 146 | - | |
| 147 | -inline void _FeatureEvaluator::Feature::updatePtrs( const Mat& _sum ) | |
| 148 | -{ | |
| 149 | - const int* ptr = (const int*)_sum.data; | |
| 150 | - size_t step = _sum.step/sizeof(ptr[0]); | |
| 151 | - Rect tr = rect; | |
| 152 | - CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step ); | |
| 153 | - tr.x += 2*rect.width; | |
| 154 | - CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step ); | |
| 155 | - tr.y += 2*rect.height; | |
| 156 | - CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step ); | |
| 157 | - tr.x -= 2*rect.width; | |
| 158 | - CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step ); | |
| 159 | -} | |
| 160 | - | |
| 161 | -enum | |
| 162 | -{ | |
| 163 | - CASCADE_DO_CANNY_PRUNING=1, | |
| 164 | - CASCADE_SCALE_IMAGE=2, | |
| 165 | - CASCADE_FIND_BIGGEST_OBJECT=4, | |
| 166 | - CASCADE_DO_ROUGH_SEARCH=8 | |
| 167 | -}; | |
| 168 | - | |
| 169 | 60 | class _CascadeClassifier |
| 170 | 61 | { |
| 171 | 62 | public: |
| 172 | - _CascadeClassifier() {} | |
| 173 | - _CascadeClassifier( const string& filename ) { load(filename); } | |
| 63 | + _CascadeClassifier() : representation(Representation::make("MBLBP(24,24)", NULL)) {} | |
| 64 | + _CascadeClassifier(const string& filename) : representation(Representation::make("MBLBP(24,24)", NULL)) { load(filename); } | |
| 174 | 65 | ~_CascadeClassifier() {} |
| 175 | 66 | |
| 176 | - bool load( const string& filename ); | |
| 177 | - bool read( const FileNode& node ); | |
| 178 | - void detectMultiScale( const Mat& image, | |
| 67 | + bool load(const string& filename); | |
| 68 | + void detectMultiScale(const Mat& image, | |
| 179 | 69 | vector<Rect>& objects, |
| 180 | 70 | double scaleFactor=1.1, |
| 181 | - int minNeighbors=3, int flags=0, | |
| 71 | + int minNeighbors=3, | |
| 182 | 72 | Size minSize=Size(), |
| 183 | - Size maxSize=Size() ); | |
| 73 | + Size maxSize=Size()); | |
| 184 | 74 | |
| 185 | 75 | void detectMultiScale( const Mat& image, |
| 186 | 76 | vector<Rect>& objects, |
| 187 | 77 | vector<int>& rejectLevels, |
| 188 | 78 | vector<double>& levelWeights, |
| 189 | 79 | double scaleFactor=1.1, |
| 190 | - int minNeighbors=3, int flags=0, | |
| 80 | + int minNeighbors=3, | |
| 191 | 81 | Size minSize=Size(), |
| 192 | 82 | Size maxSize=Size(), |
| 193 | 83 | bool outputRejectLevels=false ); |
| 194 | 84 | |
| 195 | - enum { BOOST = 0 }; | |
| 196 | - enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2, | |
| 197 | - FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 }; | |
| 198 | - | |
| 199 | - template<class FEval> | |
| 200 | - friend int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight); | |
| 201 | - | |
| 202 | - template<class FEval> | |
| 203 | - friend int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight); | |
| 204 | - | |
| 205 | - template<class FEval> | |
| 206 | - friend int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight); | |
| 207 | - | |
| 208 | - template<class FEval> | |
| 209 | - friend int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &featureEvaluator, double& weight); | |
| 210 | - | |
| 211 | - virtual int runAt(Point pt, double& weight ); | |
| 85 | + int predict(const Mat &image, double &weight) const; | |
| 212 | 86 | |
| 213 | 87 | class Data |
| 214 | 88 | { |
| ... | ... | @@ -250,157 +124,9 @@ public: |
| 250 | 124 | }; |
| 251 | 125 | |
| 252 | 126 | Data data; |
| 253 | - Ptr<_FeatureEvaluator> featureEvaluator; | |
| 127 | + Representation *representation; | |
| 254 | 128 | }; |
| 255 | 129 | |
| 256 | -template<class FEval> | |
| 257 | -inline int predictOrdered( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum ) | |
| 258 | -{ | |
| 259 | - int nstages = (int)cascade.data.stages.size(); | |
| 260 | - int nodeOfs = 0, leafOfs = 0; | |
| 261 | - FEval& featureEvaluator = (FEval&)*_featureEvaluator; | |
| 262 | - float* cascadeLeaves = &cascade.data.leaves[0]; | |
| 263 | - _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; | |
| 264 | - _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0]; | |
| 265 | - _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; | |
| 266 | - | |
| 267 | - for( int si = 0; si < nstages; si++ ) | |
| 268 | - { | |
| 269 | - _CascadeClassifier::Data::Stage& stage = cascadeStages[si]; | |
| 270 | - int wi, ntrees = stage.ntrees; | |
| 271 | - sum = 0; | |
| 272 | - | |
| 273 | - for( wi = 0; wi < ntrees; wi++ ) | |
| 274 | - { | |
| 275 | - _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi]; | |
| 276 | - int idx = 0, root = nodeOfs; | |
| 277 | - | |
| 278 | - do | |
| 279 | - { | |
| 280 | - _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx]; | |
| 281 | - double val = featureEvaluator(node.featureIdx); | |
| 282 | - idx = val < node.threshold ? node.left : node.right; | |
| 283 | - } | |
| 284 | - while( idx > 0 ); | |
| 285 | - sum += cascadeLeaves[leafOfs - idx]; | |
| 286 | - nodeOfs += weak.nodeCount; | |
| 287 | - leafOfs += weak.nodeCount + 1; | |
| 288 | - } | |
| 289 | - if( sum < stage.threshold ) | |
| 290 | - return -si; | |
| 291 | - } | |
| 292 | - return 1; | |
| 293 | -} | |
| 294 | - | |
| 295 | -template<class FEval> | |
| 296 | -inline int predictCategorical( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum ) | |
| 297 | -{ | |
| 298 | - int nstages = (int)cascade.data.stages.size(); | |
| 299 | - int nodeOfs = 0, leafOfs = 0; | |
| 300 | - FEval& featureEvaluator = (FEval&)*_featureEvaluator; | |
| 301 | - size_t subsetSize = (cascade.data.ncategories + 31)/32; | |
| 302 | - int* cascadeSubsets = &cascade.data.subsets[0]; | |
| 303 | - float* cascadeLeaves = &cascade.data.leaves[0]; | |
| 304 | - _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; | |
| 305 | - _CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0]; | |
| 306 | - _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; | |
| 307 | - | |
| 308 | - for(int si = 0; si < nstages; si++ ) | |
| 309 | - { | |
| 310 | - _CascadeClassifier::Data::Stage& stage = cascadeStages[si]; | |
| 311 | - int wi, ntrees = stage.ntrees; | |
| 312 | - sum = 0; | |
| 313 | - | |
| 314 | - for( wi = 0; wi < ntrees; wi++ ) | |
| 315 | - { | |
| 316 | - _CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi]; | |
| 317 | - int idx = 0, root = nodeOfs; | |
| 318 | - do | |
| 319 | - { | |
| 320 | - _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx]; | |
| 321 | - int c = featureEvaluator(node.featureIdx); | |
| 322 | - const int* subset = &cascadeSubsets[(root + idx)*subsetSize]; | |
| 323 | - idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right; | |
| 324 | - } | |
| 325 | - while( idx > 0 ); | |
| 326 | - sum += cascadeLeaves[leafOfs - idx]; | |
| 327 | - nodeOfs += weak.nodeCount; | |
| 328 | - leafOfs += weak.nodeCount + 1; | |
| 329 | - } | |
| 330 | - if( sum < stage.threshold ) | |
| 331 | - return -si; | |
| 332 | - } | |
| 333 | - return 1; | |
| 334 | -} | |
| 335 | - | |
| 336 | -template<class FEval> | |
| 337 | -inline int predictOrderedStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum ) | |
| 338 | -{ | |
| 339 | - int nodeOfs = 0, leafOfs = 0; | |
| 340 | - FEval& featureEvaluator = (FEval&)*_featureEvaluator; | |
| 341 | - float* cascadeLeaves = &cascade.data.leaves[0]; | |
| 342 | - _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; | |
| 343 | - _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; | |
| 344 | - | |
| 345 | - int nstages = (int)cascade.data.stages.size(); | |
| 346 | - for( int stageIdx = 0; stageIdx < nstages; stageIdx++ ) | |
| 347 | - { | |
| 348 | - _CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx]; | |
| 349 | - sum = 0.0; | |
| 350 | - | |
| 351 | - int ntrees = stage.ntrees; | |
| 352 | - for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 ) | |
| 353 | - { | |
| 354 | - _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs]; | |
| 355 | - double value = featureEvaluator(node.featureIdx); | |
| 356 | - sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ]; | |
| 357 | - } | |
| 358 | - | |
| 359 | - if( sum < stage.threshold ) | |
| 360 | - return -stageIdx; | |
| 361 | - } | |
| 362 | - | |
| 363 | - return 1; | |
| 364 | -} | |
| 365 | - | |
| 366 | -template<class FEval> | |
| 367 | -inline int predictCategoricalStump( _CascadeClassifier& cascade, Ptr<_FeatureEvaluator> &_featureEvaluator, double& sum ) | |
| 368 | -{ | |
| 369 | - int nstages = (int)cascade.data.stages.size(); | |
| 370 | - int nodeOfs = 0, leafOfs = 0; | |
| 371 | - FEval& featureEvaluator = (FEval&)*_featureEvaluator; | |
| 372 | - size_t subsetSize = (cascade.data.ncategories + 31)/32; | |
| 373 | - int* cascadeSubsets = &cascade.data.subsets[0]; | |
| 374 | - float* cascadeLeaves = &cascade.data.leaves[0]; | |
| 375 | - _CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0]; | |
| 376 | - _CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0]; | |
| 377 | - | |
| 378 | - for( int si = 0; si < nstages; si++ ) | |
| 379 | - { | |
| 380 | - _CascadeClassifier::Data::Stage& stage = cascadeStages[si]; | |
| 381 | - int wi, ntrees = stage.ntrees; | |
| 382 | - | |
| 383 | - sum = 0; | |
| 384 | - | |
| 385 | - for( wi = 0; wi < ntrees; wi++ ) | |
| 386 | - { | |
| 387 | - _CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs]; | |
| 388 | - int c = featureEvaluator(node.featureIdx); | |
| 389 | - const int* subset = &cascadeSubsets[nodeOfs*subsetSize]; | |
| 390 | - | |
| 391 | - sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1]; | |
| 392 | - | |
| 393 | - nodeOfs++; | |
| 394 | - leafOfs += 2; | |
| 395 | - } | |
| 396 | - | |
| 397 | - if( sum < stage.threshold ) | |
| 398 | - return -si; | |
| 399 | - } | |
| 400 | - | |
| 401 | - return 1; | |
| 402 | -} | |
| 403 | - | |
| 404 | 130 | } // namespace br |
| 405 | 131 | |
| 406 | 132 | #endif // CASCADE_H |
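With the flags parameter removed from both detectMultiScale overloads, call sites shrink to scale factor, neighbor count, and size bounds, as the later hunk in openbr/plugins/metadata/cascade.cpp shows. A minimal hedged call follows; the model path, image name, and parameter values are made up for illustration.

// Hypothetical caller of the trimmed _CascadeClassifier API (not taken from the repo).
#include <vector>
#include <opencv2/highgui/highgui.hpp>
#include "openbr/core/cascade.h"

int main()
{
    br::_CascadeClassifier cascade("cascade.xml");              // load() reads the stored stages
    cv::Mat image = cv::imread("scene.png", 0);                  // grayscale

    std::vector<cv::Rect> objects;
    cascade.detectMultiScale(image, objects, 1.2 /*scaleFactor*/, 3 /*minNeighbors*/,
                             cv::Size(24, 24) /*minObjectSize*/); // flags argument no longer exists
    return objects.empty() ? 1 : 0;
}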
openbr/openbr_plugin.h
| ... | ... | @@ -1436,9 +1436,8 @@ public: |
| 1436 | 1436 | // OpenCV compatibility |
| 1437 | 1437 | virtual int numFeatures() const = 0; |
| 1438 | 1438 | virtual int maxCatCount() const = 0; |
| 1439 | - virtual void getUsedFeatures(cv::Mat &featureMap) const { (void)featureMap; return; } | |
| 1440 | - virtual void write(cv::FileStorage &fs, const cv::Mat &featureMap) const { (void)fs; (void)featureMap; } | |
| 1441 | - virtual void writeFeatures(cv::FileStorage &fs, const cv::Mat &featureMap) const { (void)fs; (void)featureMap; } | |
| 1439 | + virtual void write(cv::FileStorage &fs) const { (void)fs; } | |
| 1440 | + virtual void read(const cv::FileNode &node) { (void)node; } | |
| 1442 | 1441 | }; |
| 1443 | 1442 | |
| 1444 | 1443 | /*! |
openbr/plugins/classification/boostedforest.cpp
| ... | ... | @@ -62,19 +62,9 @@ class BoostedForestClassifier : public Classifier |
| 62 | 62 | return representation->preWindowSize(); |
| 63 | 63 | } |
| 64 | 64 | |
| 65 | - void getUsedFeatures(Mat &featureMap) const | |
| 65 | + void write(FileStorage &fs) const | |
| 66 | 66 | { |
| 67 | - boost->markUsedFeaturesInMap(featureMap); | |
| 68 | - } | |
| 69 | - | |
| 70 | - void write(FileStorage &fs, const Mat &featureMap) const | |
| 71 | - { | |
| 72 | - boost->write(fs, featureMap); | |
| 73 | - } | |
| 74 | - | |
| 75 | - void writeFeatures(FileStorage &fs, const Mat &featureMap) const | |
| 76 | - { | |
| 77 | - featureEvaluator->writeFeatures(fs, featureMap); | |
| 67 | + boost->write(fs); | |
| 78 | 68 | } |
| 79 | 69 | }; |
| 80 | 70 | |
openbr/plugins/classification/cascade.cpp
| ... | ... | @@ -176,13 +176,7 @@ class CascadeClassifier : public Classifier |
| 176 | 176 | return stages.first()->windowSize(); |
| 177 | 177 | } |
| 178 | 178 | |
| 179 | - void getUsedFeatures(Mat &featureMap) const | |
| 180 | - { | |
| 181 | - foreach (const Classifier *stage, stages) | |
| 182 | - stage->getUsedFeatures(featureMap); | |
| 183 | - } | |
| 184 | - | |
| 185 | - void write(FileStorage &fs, const Mat &featureMap) const | |
| 179 | + void write(FileStorage &fs) const | |
| 186 | 180 | { |
| 187 | 181 | fs << CC_STAGE_TYPE << CC_BOOST; |
| 188 | 182 | fs << CC_FEATURE_TYPE << CC_LBP; |
| ... | ... | @@ -199,24 +193,15 @@ class CascadeClassifier : public Classifier |
| 199 | 193 | |
| 200 | 194 | fs << CC_STAGE_NUM << stages.size(); |
| 201 | 195 | |
| 202 | - char cmnt[30]; | |
| 203 | - int i = 0; | |
| 204 | 196 | fs << CC_STAGES << "["; |
| 205 | 197 | foreach (const Classifier *stage, stages) { |
| 206 | - sprintf( cmnt, "stage %d", i ); | |
| 207 | - cvWriteComment( fs.fs, cmnt, 0 ); | |
| 208 | 198 | fs << "{"; |
| 209 | - stage->write(fs, featureMap); | |
| 199 | + stage->write(fs); | |
| 210 | 200 | fs << "}"; |
| 211 | 201 | } |
| 212 | 202 | fs << "]"; |
| 213 | 203 | } |
| 214 | 204 | |
| 215 | - void writeFeatures(FileStorage &fs, const Mat& featureMap) const | |
| 216 | - { | |
| 217 | - stages.first()->writeFeatures(fs, featureMap); | |
| 218 | - } | |
| 219 | - | |
| 220 | 205 | private: |
| 221 | 206 | float fillTrainingSet(ImageHandler &imgHandler, QList<Mat> &images, QList<float> &labels) |
| 222 | 207 | { |
openbr/plugins/imgproc/slidingwindow.cpp
| ... | ... | @@ -155,17 +155,7 @@ class SlidingWindowTransform : public Transform |
| 155 | 155 | |
| 156 | 156 | fs << FileStorage::getDefaultObjectName(filename) << "{"; |
| 157 | 157 | |
| 158 | - Mat featureMap(1, classifier->numFeatures(), CV_32SC1); | |
| 159 | - featureMap.setTo(Scalar(-1)); | |
| 160 | - | |
| 161 | - classifier->getUsedFeatures(featureMap); | |
| 162 | - | |
| 163 | - for (int fi = 0, idx = 0; fi < classifier->numFeatures(); fi++) | |
| 164 | - if (featureMap.at<int>(0, fi) >= 0) | |
| 165 | - featureMap.ptr<int>(0)[fi] = idx++; | |
| 166 | - | |
| 167 | - classifier->write(fs, featureMap); | |
| 168 | - classifier->writeFeatures(fs, featureMap); | |
| 158 | + classifier->write(fs); | |
| 169 | 159 | |
| 170 | 160 | fs << "}"; |
| 171 | 161 | } |
openbr/plugins/metadata/cascade.cpp
| ... | ... | @@ -112,8 +112,8 @@ class CascadeTransform : public UntrainableMetaTransform |
| 112 | 112 | std::vector<Rect> rects; |
| 113 | 113 | std::vector<int> rejectLevels; |
| 114 | 114 | std::vector<double> levelWeights; |
| 115 | - if (ROCMode) cascade->detectMultiScale(m, rects, rejectLevels, levelWeights, 1.2, minNeighbors, (enrollAll ? 0 : CASCADE_FIND_BIGGEST_OBJECT) | CASCADE_SCALE_IMAGE, Size(minSize, minSize), Size(), true); | |
| 116 | - else cascade->detectMultiScale(m, rects, 1.2, minNeighbors, enrollAll ? 0 : CASCADE_FIND_BIGGEST_OBJECT, Size(minSize, minSize)); | |
| 115 | + if (ROCMode) cascade->detectMultiScale(m, rects, rejectLevels, levelWeights, 1.2, minNeighbors, Size(minSize, minSize), Size(), true); | |
| 116 | + else cascade->detectMultiScale(m, rects, 1.2, minNeighbors, Size(minSize, minSize)); | |
| 117 | 117 | |
| 118 | 118 | if (!enrollAll && rects.empty()) |
| 119 | 119 | rects.push_back(Rect(0, 0, m.cols, m.rows)); |