Commit 9eaaf62b25e62304f329c36e92ba25720568c947

Authored by Jordan Cheney
1 parent d4a5b59c

Steps towards simplifying and integrating the frontend cascade

openbr/core/cascade.cpp
@@ -121,81 +121,118 @@ void br::groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vect
121 121
122 // --------------------------------- Cascade Classifier ---------------------------------- 122 // --------------------------------- Cascade Classifier ----------------------------------
123 123
124 -bool _CascadeClassifier::load(const string& filename) 124 +static void loadRecursive(const FileNode &fn, _CascadeClassifier::Node *node, int maxCatCount)
125 { 125 {
126 - data = Data(); 126 + bool hasChildren = (int)fn["hasChildren"];
  127 + if (hasChildren) {
  128 + if (maxCatCount > 1) {
  129 + FileNode subset_fn = fn["subset"];
  130 + for (FileNodeIterator subset_it = subset_fn.begin(); subset_it != subset_fn.end(); ++subset_it)
  131 + node->subset.append((int)*subset_it);
  132 + } else {
  133 + node->threshold = (float)fn["threshold"];
  134 + }
  135 +
  136 + node->featureIdx = (int)fn["feature_idx"];
127 137
  138 + node->left = new _CascadeClassifier::Node;
  139 + loadRecursive(fn["left"], node->left, maxCatCount);
  140 + node->right = new _CascadeClassifier::Node;
  141 + loadRecursive(fn["right"], node->right, maxCatCount);
  142 + } else {
  143 + node->value = (float)fn["value"];
  144 + }
  145 +}
  146 +
  147 +bool _CascadeClassifier::load(const string& filename)
  148 +{
128 FileStorage fs(filename, FileStorage::READ); 149 FileStorage fs(filename, FileStorage::READ);
129 if (!fs.isOpened()) 150 if (!fs.isOpened())
130 return false; 151 return false;
131 152
132 - return data.read(fs.getFirstTopLevelNode());  
133 -} 153 + FileNode root = fs.getFirstTopLevelNode();
134 154
135 -int _CascadeClassifier::predict(const Mat &image, double &sum) const  
136 -{  
137 - int nstages = (int)data.stages.size();  
138 - int nodeOfs = 0, leafOfs = 0; 155 + const float THRESHOLD_EPS = 1e-5;
  156 +
  157 + int maxCatCount = representation->maxCatCount();
139 158
140 - size_t subsetSize = (data.ncategories + 31)/32;  
141 - const int *cascadeSubsets = &data.subsets[0]; 159 + // load stages
  160 + FileNode stages_fn = root["stages"];
  161 + if( stages_fn.empty() )
  162 + return false;
142 163
143 - const float *cascadeLeaves = &data.leaves[0];  
144 - const Data::DTreeNode *cascadeNodes = &data.nodes[0];  
145 - const Data::DTree *cascadeWeaks = &data.classifiers[0];  
146 - const Data::Stage *cascadeStages = &data.stages[0]; 164 + for (FileNodeIterator stage_it = stages_fn.begin(); stage_it != stages_fn.end(); ++stage_it) {
  165 + FileNode stage_fn = *stage_it;
  166 +
  167 + Stage stage;
  168 + stage.threshold = (float)stage_fn["stageThreshold"] - THRESHOLD_EPS;
  169 +
  170 + FileNode nodes_fn = stage_fn["weakClassifiers"];
  171 + if(nodes_fn.empty())
  172 + return false;
147 173
148 - for (int stageIdx = 0; stageIdx < nstages; stageIdx++) {  
149 - const Data::Stage &stage = cascadeStages[stageIdx]; 174 + for (FileNodeIterator node_it = nodes_fn.begin(); node_it != nodes_fn.end(); ++node_it) {
  175 + FileNode node_fn = *node_it;
  176 +
  177 + Node *root = new Node;
  178 + loadRecursive(node_fn, root, maxCatCount);
  179 +
  180 + stage.trees.append(root);
  181 + }
  182 +
  183 + stages.append(stage);
  184 + }
  185 +
  186 + return true;
  187 +}
  188 +
  189 +int _CascadeClassifier::predict(const Mat &image, double &sum) const
  190 +{
  191 + for (int stageIdx = 0; stageIdx < stages.size(); stageIdx++) {
  192 + Stage stage = stages[stageIdx];
150 sum = 0; 193 sum = 0;
151 194
152 - for (int wi = 0; wi < stage.ntrees; wi++) {  
153 - const Data::DTree &weak = cascadeWeaks[stage.first + wi];  
154 - int idx = 0, root = nodeOfs; 195 + for (int treeIdx = 0; treeIdx < stage.trees.size(); treeIdx++) {
  196 + Node *node = stage.trees[treeIdx];
155 197
156 - do {  
157 - const Data::DTreeNode &node = cascadeNodes[root + idx];  
158 - if (data.ncategories > 0) {  
159 - int c = (int)representation->evaluate(image, node.featureIdx);  
160 - const int* subset = &cascadeSubsets[(root + idx)*subsetSize];  
161 - idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right; 198 + while (node->left) {
  199 + if (representation->maxCatCount() > 1) {
  200 + int c = (int)representation->evaluate(image, node->featureIdx);
  201 + node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right;
162 } else { 202 } else {
163 - double val = representation->evaluate(image, node.featureIdx);  
164 - idx = val < node.threshold ? node.left : node.right; 203 + double val = representation->evaluate(image, node->featureIdx);
  204 + node = val < node->threshold ? node->left : node->right;
165 } 205 }
166 - } while( idx > 0 );  
167 -  
168 - sum += cascadeLeaves[leafOfs - idx];  
169 - nodeOfs += weak.nodeCount;  
170 - leafOfs += weak.nodeCount + 1; 206 + }
  207 + sum += node->value;
171 } 208 }
172 - if( sum < stage.threshold )  
173 - return -stageIdx; 209 +
  210 + if (sum < stage.threshold)
  211 + return stageIdx;
174 } 212 }
175 - return 1; 213 +
  214 + return stages.size();
176 } 215 }
177 216
178 -void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,  
179 - vector<int>& rejectLevels,  
180 - vector<double>& levelWeights,  
181 - double scaleFactor, int minNeighbors,  
182 - Size minObjectSize, Size maxObjectSize,  
183 - bool outputRejectLevels ) 217 +void _CascadeClassifier::detectMultiScale(const Mat& image, vector<Rect>& objects, vector<int>& rejectLevels,
  218 + vector<double>& levelWeights,
  219 + double scaleFactor, int minNeighbors,
  220 + Size minSize, Size maxSize) const
184 { 221 {
185 const double GROUP_EPS = 0.2; 222 const double GROUP_EPS = 0.2;
186 223
187 CV_Assert( scaleFactor > 1 && image.depth() == CV_8U ); 224 CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
188 225
189 - if (data.stages.empty()) 226 + if (stages.empty())
190 return; 227 return;
191 228
192 - if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )  
193 - maxObjectSize = image.size(); 229 + if( maxSize.height == 0 || maxSize.width == 0 )
  230 + maxSize = image.size();
194 231
195 Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U); 232 Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
196 233
197 for (double factor = 1; ; factor *= scaleFactor) { 234 for (double factor = 1; ; factor *= scaleFactor) {
198 - Size originalWindowSize = data.origWinSize; 235 + Size originalWindowSize = representation->preWindowSize();
199 236
200 Size windowSize(cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) ); 237 Size windowSize(cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
201 Size scaledImageSize(cvRound(image.cols/factor ), cvRound(image.rows/factor)); 238 Size scaledImageSize(cvRound(image.cols/factor ), cvRound(image.rows/factor));
@@ -203,9 +240,9 @@ void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objec
203 240
204 if (processingRectSize.width <= 0 || processingRectSize.height <= 0) 241 if (processingRectSize.width <= 0 || processingRectSize.height <= 0)
205 break; 242 break;
206 - if (windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height) 243 + if (windowSize.width > maxSize.width || windowSize.height > maxSize.height)
207 break; 244 break;
208 - if (windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height) 245 + if (windowSize.width < minSize.width || windowSize.height < minSize.height)
209 continue; 246 continue;
210 247
211 Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data); 248 Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data);
@@ -222,126 +259,17 @@ void _CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objec
222 double gypWeight; 259 double gypWeight;
223 int result = predict(window, gypWeight); 260 int result = predict(window, gypWeight);
224 261
225 - if (outputRejectLevels) {  
226 - if (result == 1)  
227 - result = -(int)data.stages.size();  
228 - if (data.stages.size() + result < 4) {  
229 - objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));  
230 - rejectLevels.push_back(-result);  
231 - levelWeights.push_back(gypWeight);  
232 - }  
233 - }  
234 - else if (result > 0) { 262 + if (stages.size() - result < 4) {
235 objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height)); 263 objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height));
  264 + rejectLevels.push_back(result);
  265 + levelWeights.push_back(gypWeight);
236 } 266 }
  267 +
237 if (result == 0) 268 if (result == 0)
238 x += yStep; 269 x += yStep;
239 } 270 }
240 } 271 }
241 } 272 }
242 273
243 - if (outputRejectLevels)  
244 - groupRectangles(objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS);  
245 - else  
246 - groupRectangles(objects, minNeighbors, GROUP_EPS);  
247 -}  
248 -  
249 -void _CascadeClassifier::detectMultiScale(const Mat& image, vector<Rect>& objects,  
250 - double scaleFactor, int minNeighbors, Size minObjectSize, Size maxObjectSize)  
251 -{  
252 - vector<int> fakeLevels;  
253 - vector<double> fakeWeights;  
254 - detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,  
255 - minNeighbors, minObjectSize, maxObjectSize, false );  
256 -}  
257 -  
258 -bool _CascadeClassifier::Data::read(const FileNode &root)  
259 -{  
260 - static const float THRESHOLD_EPS = 1e-5f;  
261 -  
262 - origWinSize.width = (int)root[CC_WIDTH];  
263 - origWinSize.height = (int)root[CC_HEIGHT];  
264 - CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );  
265 -  
266 - isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1 ? true : false;  
267 -  
268 - // load feature params  
269 - FileNode fn = root[CC_FEATURE_PARAMS];  
270 - if( fn.empty() )  
271 - return false;  
272 -  
273 - ncategories = fn[CC_MAX_CAT_COUNT];  
274 - int subsetSize = (ncategories + 31)/32,  
275 - nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );  
276 -  
277 - // load stages  
278 - fn = root[CC_STAGES];  
279 - if( fn.empty() )  
280 - return false;  
281 -  
282 - stages.reserve(fn.size());  
283 - classifiers.clear();  
284 - nodes.clear();  
285 -  
286 - FileNodeIterator it = fn.begin(), it_end = fn.end();  
287 -  
288 - for( int si = 0; it != it_end; si++, ++it )  
289 - {  
290 - FileNode fns = *it;  
291 - Stage stage;  
292 - stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;  
293 - fns = fns[CC_WEAK_CLASSIFIERS];  
294 - if(fns.empty())  
295 - return false;  
296 - stage.ntrees = (int)fns.size();  
297 - stage.first = (int)classifiers.size();  
298 - stages.push_back(stage);  
299 - classifiers.reserve(stages[si].first + stages[si].ntrees);  
300 -  
301 - FileNodeIterator it1 = fns.begin(), it1_end = fns.end();  
302 - for( ; it1 != it1_end; ++it1 ) // weak trees  
303 - {  
304 - FileNode fnw = *it1;  
305 - FileNode internalNodes = fnw[CC_INTERNAL_NODES];  
306 - FileNode leafValues = fnw[CC_LEAF_VALUES];  
307 - if( internalNodes.empty() || leafValues.empty() )  
308 - return false;  
309 -  
310 - DTree tree;  
311 - tree.nodeCount = (int)internalNodes.size()/nodeStep;  
312 - classifiers.push_back(tree);  
313 -  
314 - nodes.reserve(nodes.size() + tree.nodeCount);  
315 - leaves.reserve(leaves.size() + leafValues.size());  
316 - if( subsetSize > 0 )  
317 - subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);  
318 -  
319 - FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();  
320 -  
321 - for( ; internalNodesIter != internalNodesEnd; ) // nodes  
322 - {  
323 - DTreeNode node;  
324 - node.left = (int)*internalNodesIter; ++internalNodesIter;  
325 - node.right = (int)*internalNodesIter; ++internalNodesIter;  
326 - node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;  
327 - if( subsetSize > 0 )  
328 - {  
329 - for( int j = 0; j < subsetSize; j++, ++internalNodesIter )  
330 - subsets.push_back((int)*internalNodesIter);  
331 - node.threshold = 0.f;  
332 - }  
333 - else  
334 - {  
335 - node.threshold = (float)*internalNodesIter; ++internalNodesIter;  
336 - }  
337 - nodes.push_back(node);  
338 - }  
339 -  
340 - internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();  
341 -  
342 - for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves  
343 - leaves.push_back((float)*internalNodesIter);  
344 - }  
345 - }  
346 - return true; 274 + groupRectangles(objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS);
347 } 275 }
openbr/core/cascade.h
@@ -4,31 +4,6 @@ @@ -4,31 +4,6 @@
4 #include <openbr/openbr_plugin.h> 4 #include <openbr/openbr_plugin.h>
5 #include <opencv2/imgproc/imgproc.hpp> 5 #include <opencv2/imgproc/imgproc.hpp>
6 6
7 -#define CC_CASCADE_PARAMS "cascadeParams"  
8 -#define CC_STAGE_TYPE "stageType"  
9 -#define CC_FEATURE_TYPE "featureType"  
10 -#define CC_HEIGHT "height"  
11 -#define CC_WIDTH "width"  
12 -  
13 -#define CC_STAGE_NUM "stageNum"  
14 -#define CC_STAGES "stages"  
15 -#define CC_STAGE_PARAMS "stageParams"  
16 -  
17 -#define CC_BOOST "BOOST"  
18 -#define CC_MAX_DEPTH "maxDepth"  
19 -#define CC_WEAK_COUNT "maxWeakCount"  
20 -#define CC_STAGE_THRESHOLD "stageThreshold"  
21 -#define CC_WEAK_CLASSIFIERS "weakClassifiers"  
22 -#define CC_INTERNAL_NODES "internalNodes"  
23 -#define CC_LEAF_VALUES "leafValues"  
24 -  
25 -#define CC_FEATURES "features"  
26 -#define CC_FEATURE_PARAMS "featureParams"  
27 -#define CC_MAX_CAT_COUNT "maxCatCount"  
28 -  
29 -#define CC_LBP "LBP"  
30 -  
31 -  
32 using namespace std; 7 using namespace std;
33 using namespace cv; 8 using namespace cv;
34 9
@@ -65,65 +40,37 @@ public: @@ -65,65 +40,37 @@ public:
65 ~_CascadeClassifier() {} 40 ~_CascadeClassifier() {}
66 41
67 bool load(const string& filename); 42 bool load(const string& filename);
68 - void detectMultiScale(const Mat& image,  
69 - vector<Rect>& objects,  
70 - double scaleFactor=1.1,  
71 - int minNeighbors=3,  
72 - Size minSize=Size(),  
73 - Size maxSize=Size());  
74 43
75 - void detectMultiScale( const Mat& image, 44 + void detectMultiScale(const Mat& image,
76 vector<Rect>& objects, 45 vector<Rect>& objects,
77 vector<int>& rejectLevels, 46 vector<int>& rejectLevels,
78 vector<double>& levelWeights, 47 vector<double>& levelWeights,
79 double scaleFactor=1.1, 48 double scaleFactor=1.1,
80 int minNeighbors=3, 49 int minNeighbors=3,
81 Size minSize=Size(), 50 Size minSize=Size(),
82 - Size maxSize=Size(),  
83 - bool outputRejectLevels=false ); 51 + Size maxSize=Size()) const;
84 52
85 int predict(const Mat &image, double &weight) const; 53 int predict(const Mat &image, double &weight) const;
86 54
87 - class Data 55 + struct Node
88 { 56 {
89 - public:  
90 - struct DTreeNode  
91 - {  
92 - int featureIdx;  
93 - float threshold; // for ordered features only  
94 - int left;  
95 - int right;  
96 - };  
97 -  
98 - struct DTree  
99 - {  
100 - int nodeCount;  
101 - };  
102 -  
103 - struct Stage  
104 - {  
105 - int first;  
106 - int ntrees;  
107 - float threshold;  
108 - };  
109 -  
110 - bool read(const FileNode &node);  
111 -  
112 - bool isStumpBased;  
113 -  
114 - int stageType;  
115 - int featureType;  
116 - int ncategories;  
117 - Size origWinSize; 57 + Node() : left(NULL), right(NULL) {}
  58 +
  59 + int featureIdx;
  60 + float threshold; // for ordered features only
  61 + QList<int> subset; // for categorical features only
  62 + float value; // for leaf nodes only
  63 + Node *left;
  64 + Node *right;
  65 + };
118 66
119 - vector<Stage> stages;  
120 - vector<DTree> classifiers;  
121 - vector<DTreeNode> nodes;  
122 - vector<float> leaves;  
123 - vector<int> subsets; 67 + struct Stage
  68 + {
  69 + QList<Node*> trees;
  70 + float threshold;
124 }; 71 };
125 72
126 - Data data; 73 + QList<Stage> stages;
127 Representation *representation; 74 Representation *representation;
128 }; 75 };
129 76
openbr/plugins/metadata/cascade.cpp
@@ -75,7 +75,7 @@ class CascadeTransform : public UntrainableMetaTransform @@ -75,7 +75,7 @@ class CascadeTransform : public UntrainableMetaTransform
75 BR_PROPERTY(QString, model, "FrontalFace") 75 BR_PROPERTY(QString, model, "FrontalFace")
76 BR_PROPERTY(int, minSize, 64) 76 BR_PROPERTY(int, minSize, 64)
77 BR_PROPERTY(int, minNeighbors, 5) 77 BR_PROPERTY(int, minNeighbors, 5)
78 - BR_PROPERTY(bool, ROCMode, false) 78 + BR_PROPERTY(bool, ROCMode, false)
79 79
80 Resource<_CascadeClassifier> cascadeResource; 80 Resource<_CascadeClassifier> cascadeResource;
81 81
@@ -112,8 +112,7 @@ class CascadeTransform : public UntrainableMetaTransform @@ -112,8 +112,7 @@ class CascadeTransform : public UntrainableMetaTransform
112 std::vector<Rect> rects; 112 std::vector<Rect> rects;
113 std::vector<int> rejectLevels; 113 std::vector<int> rejectLevels;
114 std::vector<double> levelWeights; 114 std::vector<double> levelWeights;
115 - if (ROCMode) cascade->detectMultiScale(m, rects, rejectLevels, levelWeights, 1.2, minNeighbors, Size(minSize, minSize), Size(), true);  
116 - else cascade->detectMultiScale(m, rects, 1.2, minNeighbors, Size(minSize, minSize)); 115 + cascade->detectMultiScale(m, rects, rejectLevels, levelWeights, 1.2, minNeighbors, Size(minSize, minSize), Size());
117 116
118 if (!enrollAll && rects.empty()) 117 if (!enrollAll && rects.empty())
119 rects.push_back(Rect(0, 0, m.cols, m.rows)); 118 rects.push_back(Rect(0, 0, m.cols, m.rows));