diff --git a/openbr/core/boost.cpp b/openbr/core/boost.cpp index 16ed736..abf4de2 100644 --- a/openbr/core/boost.cpp +++ b/openbr/core/boost.cpp @@ -154,11 +154,6 @@ void FeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx) representation->preprocess(img, integralImg); } -void FeatureEvaluator::writeFeatures(FileStorage &fs, const Mat &featureMap) const -{ - representation->write(fs, featureMap); -} - //----------------------------- CascadeBoostParams ------------------------------------------------- CascadeBoostParams::CascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F ) @@ -178,21 +173,6 @@ CascadeBoostParams::CascadeBoostParams( int _boostType, use_surrogates = use_1se_rule = truncate_pruned_tree = false; } -void CascadeBoostParams::write( FileStorage &fs ) const -{ - string boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST : - boost_type == CvBoost::REAL ? CC_REAL_BOOST : - boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST : - boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : string(); - CV_Assert( !boostTypeStr.empty() ); - fs << CC_BOOST_TYPE << boostTypeStr; - fs << CC_MINHITRATE << minHitRate; - fs << CC_MAXFALSEALARM << maxFalseAlarm; - fs << CC_TRIM_RATE << weight_trim_rate; - fs << CC_MAX_DEPTH << max_depth; - fs << CC_WEAK_COUNT << weak_count; -} - //---------------------------- CascadeBoostTrainData ----------------------------- CvDTreeNode* CascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx ) @@ -826,7 +806,7 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const return node; } -/* + static void writeRecursive(FileStorage &fs, CvDTreeNode *node, int maxCatCount) { bool hasChildren = node->left ? true : false; @@ -835,7 +815,7 @@ static void writeRecursive(FileStorage &fs, CvDTreeNode *node, int maxCatCount) if (!hasChildren) // Write the leaf value fs << "value" << node->value; // value of the node. 
Only relevant for leaf nodes else { // Write the splitting information and then the children - if (maxCatCount > 0) { + if (maxCatCount > 1) { fs << "subset" << "[:"; for (int i = 0; i < ((maxCatCount + 31) / 32); i++) fs << node->split->subset[i]; // subset to split on (categorical features) @@ -857,7 +837,7 @@ void CascadeBoostTree::write(FileStorage &fs) writeRecursive(fs, root, ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount()); fs << "}"; } - +/* static void readRecursive(const FileNode &fn, CvDTreeNode *node, CvDTreeTrainData *data) { bool hasChildren = (int)fn["hasChildren"]; @@ -897,67 +877,8 @@ void CascadeBoostTree::read(const FileNode &fn, CvBoost* _ensemble, CvDTreeTrain root = data->new_node(0, 0, 0, 0); readRecursive(fn, root, data); -}*/ - -void CascadeBoostTree::write(FileStorage &fs) -{ - int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount(); - int subsetN = (maxCatCount + 31)/32; - queue internalNodesQueue; - int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth); - Ptr leafVals = new float[size]; - int leafValIdx = 0; - int internalNodeIdx = 1; - CvDTreeNode* tempNode; - - CV_DbgAssert( root ); - internalNodesQueue.push( root ); - - fs << "{"; - fs << CC_INTERNAL_NODES << "[:"; - while (!internalNodesQueue.empty()) - { - tempNode = internalNodesQueue.front(); - CV_Assert( tempNode->left ); - if ( !tempNode->left->left && !tempNode->left->right) // left node is leaf - { - leafVals[-leafValIdx] = (float)tempNode->left->value; - fs << leafValIdx-- ; - } - else - { - internalNodesQueue.push( tempNode->left ); - fs << internalNodeIdx++; - } - CV_Assert( tempNode->right ); - if ( !tempNode->right->left && !tempNode->right->right) // right node is leaf - { - leafVals[-leafValIdx] = (float)tempNode->right->value; - fs << leafValIdx--; - } - else - { - internalNodesQueue.push( tempNode->right ); - fs << internalNodeIdx++; - } - int fidx = tempNode->split->var_idx; - - fs << fidx; - if ( 
!maxCatCount ) - fs << tempNode->split->ord.c; - else - for( int i = 0; i < subsetN; i++ ) - fs << tempNode->split->subset[i]; - internalNodesQueue.pop(); - } - fs << "]"; // CC_INTERNAL_NODES - - fs << CC_LEAF_VALUES << "[:"; - for (int ni = 0; ni < -leafValIdx; ni++) - fs << leafVals[ni]; - fs << "]"; // CC_LEAF_VALUES - fs << "}"; } +*/ void CascadeBoostTree::split_node_data( CvDTreeNode* node ) { @@ -1214,6 +1135,7 @@ bool CascadeBoost::train( const FeatureEvaluator* _featureEvaluator, break; } + classifiers.append(tree); cvSeqPush( weak, &tree ); update_weights( tree ); trim_weights(); @@ -1542,20 +1464,3 @@ bool CascadeBoost::isErrDesired() return falseAlarm <= maxFalseAlarm; } -void CascadeBoost::write(FileStorage &fs) const -{ -// char cmnt[30]; - CascadeBoostTree* weakTree; - fs << CC_WEAK_COUNT << weak->total; - fs << CC_STAGE_THRESHOLD << threshold; - fs << CC_WEAK_CLASSIFIERS << "["; - for( int wi = 0; wi < weak->total; wi++) - { - /*sprintf( cmnt, "tree %i", wi ); - cvWriteComment( fs, cmnt, 0 );*/ - weakTree = *((CascadeBoostTree**) cvGetSeqElem( weak, wi )); - weakTree->write(fs); - } - fs << "]"; -} - diff --git a/openbr/core/boost.h b/openbr/core/boost.h index 136143a..d0d9635 100644 --- a/openbr/core/boost.h +++ b/openbr/core/boost.h @@ -55,7 +55,6 @@ struct FeatureEvaluator ~FeatureEvaluator() {} void init(Representation *_representation, int _maxSampleCount); void setImage(const cv::Mat& img, uchar clsLabel, int idx); - void writeFeatures(cv::FileStorage &fs, const cv::Mat& featureMap) const; float operator()(int featureIdx, int sampleIdx) const { return representation->evaluate(data.row(sampleIdx), featureIdx); } int getNumFeatures() const { return representation->numFeatures(); } @@ -77,7 +76,6 @@ struct CascadeBoostParams : CvBoostParams CascadeBoostParams(int _boostType, float _minHitRate, float _maxFalseAlarm, double _weightTrimRate, int _maxDepth, int _maxWeakCount); virtual ~CascadeBoostParams() {} - void write( cv::FileStorage &fs ) 
const; }; struct CascadeBoostTrainData : CvDTreeTrainData @@ -128,13 +126,15 @@ public: virtual float predict( int sampleIdx, bool returnSum = false ) const; float getThreshold() const { return threshold; } - void write(cv::FileStorage &fs) const; + QList<CvBoostTree*> getClassifiers() const { return classifiers; } protected: virtual bool set_params(const CvBoostParams& _params); virtual void update_weights(CvBoostTree* tree); virtual bool isErrDesired(); + QList<CvBoostTree*> classifiers; + + float threshold; float minHitRate, maxFalseAlarm; }; diff --git a/openbr/plugins/classification/boostedforest.cpp b/openbr/plugins/classification/boostedforest.cpp index fc4bb09..7aade36 100644 --- a/openbr/plugins/classification/boostedforest.cpp +++ b/openbr/plugins/classification/boostedforest.cpp @@ -6,6 +6,64 @@ using namespace cv; namespace br { +struct Node +{ + Node() : left(NULL), right(NULL) {} + + float value; + + float threshold; // For ordered features + QList<int> subset; // For categorical features + int featureIdx; + + Node *left; + Node *right; +}; + +static void buildTreeRecursive(Node *node, const CvDTreeNode *cv_node, int maxCatCount) +{ + if (!cv_node->left) // Write the leaf value + node->value = cv_node->value; // value of the node. Only relevant for leaf nodes + else { // Write the splitting information and then the children + if (maxCatCount > 1) + for (int i = 0; i < ((maxCatCount + 31) / 32); i++) + node->subset.append(cv_node->split->subset[i]); // subset to split on (categorical features) + else + node->threshold = cv_node->split->ord.c; // threshold to split on (ordered features) + + node->featureIdx = cv_node->split->var_idx; // feature idx of node + + node->left = new Node; + buildTreeRecursive(node->left, cv_node->left, maxCatCount); + node->right = new Node; + buildTreeRecursive(node->right, cv_node->right, maxCatCount); + } +} + +static void writeRecursive(FileStorage &fs, const Node *node, int maxCatCount) +{ + bool hasChildren = node->left ?
true : false; + fs << "hasChildren" << hasChildren; + + if (!hasChildren) // Write the leaf value + fs << "value" << node->value; // value of the node. Only relevant for leaf nodes + else { // Write the splitting information and then the children + if (maxCatCount > 1) { + fs << "subset" << "[:"; + for (int i = 0; i < ((maxCatCount + 31) / 32); i++) + fs << node->subset[i]; // subset to split on (categorical features) + fs << "]"; + } else { + fs << "threshold" << node->threshold; // threshold to split on (ordered features) + } + + fs << "feature_idx" << node->featureIdx; // feature idx of node + + fs << "left" << "{"; writeRecursive(fs, node->left, maxCatCount); fs << "}"; // write left child + fs << "right" << "{"; writeRecursive(fs, node->right, maxCatCount); fs << "}"; // write right child + } +} + class BoostedForestClassifier : public Classifier { Q_OBJECT @@ -24,27 +82,49 @@ class BoostedForestClassifier : public Classifier BR_PROPERTY(int, maxDepth, 1) BR_PROPERTY(int, maxWeakCount, 100) - CascadeBoost *boost; - FeatureEvaluator *featureEvaluator; + QList<Node*> weakClassifiers; + float threshold; void train(const QList<Mat> &images, const QList<float> &labels) { CascadeBoostParams params(CvBoost::GENTLE, minTAR, maxFAR, trimRate, maxDepth, maxWeakCount); - featureEvaluator = new FeatureEvaluator; - featureEvaluator->init(representation, images.size()); + FeatureEvaluator featureEvaluator; + featureEvaluator.init(representation, images.size()); for (int i = 0; i < images.size(); i++) - featureEvaluator->setImage(images[i], labels[i], i); + featureEvaluator.setImage(images[i], labels[i], i); + + CascadeBoost boost; + boost.train(&featureEvaluator, images.size(), 2048, 2048, params); + + threshold = boost.getThreshold(); - boost = new CascadeBoost; - boost->train(featureEvaluator, images.size(), 1024, 1024, params); + foreach (const CvBoostTree *tree, boost.getClassifiers()) { + Node *root = new Node; + buildTreeRecursive(root, tree->get_root(), representation->maxCatCount());
+ weakClassifiers.append(root); + } } float classify(const Mat &image) const { - featureEvaluator->setImage(image, 0, 0); - return boost->predict(0); + float sum = 0; + foreach (const Node *root, weakClassifiers) { + const Node *node = root; + + while (node->left) { + if (representation->maxCatCount() > 1) { + int c = (int)representation->evaluate(image, node->featureIdx); + node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right; + } else { + float val = representation->evaluate(image, node->featureIdx); + node = val < node->threshold ? node->left : node->right; + } + } + sum += node->value; + } + return sum < threshold - FLT_EPSILON ? 0.0 : 1.0; } int numFeatures() const @@ -64,7 +144,15 @@ class BoostedForestClassifier : public Classifier void write(FileStorage &fs) const { - boost->write(fs); + fs << "weakCount" << weakClassifiers.size(); + fs << "stageThreshold" << threshold; + fs << "weakClassifiers" << "["; + foreach (const Node *root, weakClassifiers) { + fs << "{"; + writeRecursive(fs, root, representation->maxCatCount()); + fs << "}"; + } + fs << "]"; } }; diff --git a/openbr/plugins/classification/cascade.cpp b/openbr/plugins/classification/cascade.cpp index ec00c67..0baec84 100644 --- a/openbr/plugins/classification/cascade.cpp +++ b/openbr/plugins/classification/cascade.cpp @@ -178,22 +178,9 @@ class CascadeClassifier : public Classifier void write(FileStorage &fs) const { - fs << CC_STAGE_TYPE << CC_BOOST; - fs << CC_FEATURE_TYPE << CC_LBP; - fs << CC_HEIGHT << 24; - fs << CC_WIDTH << 24; + fs << "stageCount" << stages.size(); - CascadeBoostParams stageParams(CvBoost::GINI, 0.999, 0.5, 0.95, 1, 200); - fs << CC_STAGE_PARAMS << "{"; stageParams.write( fs ); fs << "}"; - - fs << CC_FEATURE_PARAMS << "{"; - fs << CC_MAX_CAT_COUNT << stages.first()->maxCatCount(); - fs << CC_FEATURE_SIZE << 1; - fs << "}"; - - fs << CC_STAGE_NUM << stages.size(); - - fs << CC_STAGES << "["; + fs << "stages" << "["; foreach (const Classifier *stage, 
stages) { fs << "{"; stage->write(fs); diff --git a/openbr/plugins/representation/haar.cpp b/openbr/plugins/representation/haar.cpp new file mode 100644 index 0000000..8f612e7 --- /dev/null +++ b/openbr/plugins/representation/haar.cpp @@ -0,0 +1,169 @@ +#include + +#include +#include + +using namespace cv; + +namespace br +{ + +#define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step ) \ + /* (x, y) */ \ + (p0) = (rect).x + (step) * (rect).y; \ + /* (x + w, y) */ \ + (p1) = (rect).x + (rect).width + (step) * (rect).y; \ + /* (x + w, y) */ \ + (p2) = (rect).x + (step) * ((rect).y + (rect).height); \ + /* (x + w, y + h) */ \ + (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height); + +class HaarRepresentation : public Representation +{ + Q_OBJECT + + Q_PROPERTY(int winWidth READ get_winWidth WRITE set_winWidth RESET reset_winWidth STORED false) + Q_PROPERTY(int winHeight READ get_winHeight WRITE set_winHeight RESET reset_winHeight STORED false) + BR_PROPERTY(int, winWidth, 24) + BR_PROPERTY(int, winHeight, 24) + + void init() + { + int offset = winWidth + 1; + for (int x = 0; x < winWidth; x++) { + for (int y = 0; y < winHeight; y++) { + for (int dx = 1; dx <= winWidth; dx++) { + for (int dy = 1; dy <= winHeight; dy++) { + // haar_x2 + if ((x+dx*2 <= winWidth) && (y+dy <= winHeight)) + features.append(Feature(offset, + x, y, dx*2, dy, -1, + x+dx, y, dx , dy, +2)); + // haar_y2 + if ((x+dx <= winWidth) && (y+dy*2 <= winHeight)) + features.append(Feature(offset, + x, y, dx, dy*2, -1, + x, y+dy, dx, dy, +2)); + // haar_x3 + if ((x+dx*3 <= winWidth) && (y+dy <= winHeight)) + features.append(Feature(offset, + x, y, dx*3, dy, -1, + x+dx, y, dx , dy, +3)); + // haar_y3 + if ((x+dx <= winWidth) && (y+dy*3 <= winHeight)) + features.append(Feature(offset, + x, y, dx, dy*3, -1, + x, y+dy, dx, dy, +3)); + // x2_y2 + if ((x+dx*2 <= winWidth) && (y+dy*2 <= winHeight)) + features.append(Feature(offset, + x, y, dx*2, dy*2, -1, + x, y, dx, dy, +2, + x+dx, y+dy, dx, dy, +2)); 
+ + + } + } + } + } + } + + void preprocess(const Mat &src, Mat &dst) const + { + integral(src, dst); + } + + float evaluate(const Mat &image, int idx) const + { + return (float)features[idx].calc(image); + } + + Mat evaluate(const Mat &image, const QList<int> &indices) const + { + int size = indices.empty() ? numFeatures() : indices.size(); + + Mat result(1, size, CV_32FC1); + for (int i = 0; i < size; i++) + result.at<float>(i) = evaluate(image, indices.empty() ? i : indices[i]); + return result; + } + + int numFeatures() const { return features.size(); } + Size preWindowSize() const { return Size(winWidth, winHeight); } + Size postWindowSize() const { return Size(winWidth + 1, winHeight + 1); } + int maxCatCount() const { return 0; } + + struct Feature + { + Feature(); + Feature( int offset, + int x0, int y0, int w0, int h0, float wt0, + int x1, int y1, int w1, int h1, float wt1, + int x2 = 0, int y2 = 0, int w2 = 0, int h2 = 0, float wt2 = 0.0F ); + float calc(const Mat &img) const; + + struct { + Rect r; + float weight; + } rect[3]; + + struct { + int p0, p1, p2, p3; + } fastRect[3]; + }; + + QList<Feature> features; +}; + +BR_REGISTER(Representation, HaarRepresentation) + +HaarRepresentation::Feature::Feature() +{ + rect[0].r = rect[1].r = rect[2].r = Rect(0,0,0,0); + rect[0].weight = rect[1].weight = rect[2].weight = 0; +} + +HaarRepresentation::Feature::Feature(int offset, + int x0, int y0, int w0, int h0, float wt0, + int x1, int y1, int w1, int h1, float wt1, + int x2, int y2, int w2, int h2, float wt2) +{ + rect[0].r.x = x0; + rect[0].r.y = y0; + rect[0].r.width = w0; + rect[0].r.height = h0; + rect[0].weight = wt0; + + rect[1].r.x = x1; + rect[1].r.y = y1; + rect[1].r.width = w1; + rect[1].r.height = h1; + rect[1].weight = wt1; + + rect[2].r.x = x2; + rect[2].r.y = y2; + rect[2].r.width = w2; + rect[2].r.height = h2; + rect[2].weight = wt2; + + for (int j = 0; j < 3; j++) { + if( rect[j].weight == 0.0F ) + break; + CV_SUM_OFFSETS(fastRect[j].p0, fastRect[j].p1,
fastRect[j].p2, fastRect[j].p3, rect[j].r, offset) + } +} + +inline float HaarRepresentation::Feature::calc(const Mat &img) const +{ + const int* ptr = img.ptr<int>(); + float ret = rect[0].weight * (ptr[fastRect[0].p0] - ptr[fastRect[0].p1] - ptr[fastRect[0].p2] + ptr[fastRect[0].p3]) + + rect[1].weight * (ptr[fastRect[1].p0] - ptr[fastRect[1].p1] - ptr[fastRect[1].p2] + ptr[fastRect[1].p3]); + if (rect[2].weight != 0.0f) + ret += rect[2].weight * (ptr[fastRect[2].p0] - ptr[fastRect[2].p1] - ptr[fastRect[2].p2] + ptr[fastRect[2].p3]); + return ret; +} + +} // namespace br + +#include "representation/haar.moc" + diff --git a/openbr/plugins/representation/mblbp.cpp b/openbr/plugins/representation/mblbp.cpp index be74148..66d8137 100644 --- a/openbr/plugins/representation/mblbp.cpp +++ b/openbr/plugins/representation/mblbp.cpp @@ -58,7 +58,6 @@ class MBLBPRepresentation : public Representation return result; } - void write(FileStorage &fs, const Mat &featureMap); int numFeatures() const { return features.size(); } Size preWindowSize() const { return Size(winWidth, winHeight); } Size postWindowSize() const { return Size(winWidth + 1, winHeight + 1); } @@ -79,20 +78,6 @@ class MBLBPRepresentation : public Representation BR_REGISTER(Representation, MBLBPRepresentation) -void MBLBPRepresentation::write(FileStorage &fs, const Mat &featureMap) -{ - fs << "features" << "["; - const Mat_<int>& featureMap_ = (const Mat_<int>&)featureMap; - for ( int fi = 0; fi < featureMap.cols; fi++ ) - if ( featureMap_(0, fi) >= 0 ) - { - fs << "{"; - features[fi].write( fs ); - fs << "}"; - } - fs << "]"; -} - MBLBPRepresentation::Feature::Feature( int offset, int x, int y, int _blockWidth, int _blockHeight ) { Rect tr = rect = cvRect(x, y, _blockWidth, _blockHeight);