Commit d14551dda5f03911db8ccc8c4fab0a728b9a1771

Authored by Jordan Cheney
1 parent 9eaaf62b

Reverted 2 commits to avoid a regression. Added a first attempt at a Haar representation

openbr/core/boost.cpp
... ... @@ -154,11 +154,6 @@ void FeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx)
154 154 representation->preprocess(img, integralImg);
155 155 }
156 156  
157   -void FeatureEvaluator::writeFeatures(FileStorage &fs, const Mat &featureMap) const
158   -{
159   - representation->write(fs, featureMap);
160   -}
161   -
162 157 //----------------------------- CascadeBoostParams -------------------------------------------------
163 158  
164 159 CascadeBoostParams::CascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F )
... ... @@ -178,21 +173,6 @@ CascadeBoostParams::CascadeBoostParams( int _boostType,
178 173 use_surrogates = use_1se_rule = truncate_pruned_tree = false;
179 174 }
180 175  
181   -void CascadeBoostParams::write( FileStorage &fs ) const
182   -{
183   - string boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
184   - boost_type == CvBoost::REAL ? CC_REAL_BOOST :
185   - boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
186   - boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : string();
187   - CV_Assert( !boostTypeStr.empty() );
188   - fs << CC_BOOST_TYPE << boostTypeStr;
189   - fs << CC_MINHITRATE << minHitRate;
190   - fs << CC_MAXFALSEALARM << maxFalseAlarm;
191   - fs << CC_TRIM_RATE << weight_trim_rate;
192   - fs << CC_MAX_DEPTH << max_depth;
193   - fs << CC_WEAK_COUNT << weak_count;
194   -}
195   -
196 176 //---------------------------- CascadeBoostTrainData -----------------------------
197 177  
198 178 CvDTreeNode* CascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx )
... ... @@ -826,7 +806,7 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const
826 806 return node;
827 807 }
828 808  
829   -/*
  809 +
830 810 static void writeRecursive(FileStorage &fs, CvDTreeNode *node, int maxCatCount)
831 811 {
832 812 bool hasChildren = node->left ? true : false;
... ... @@ -835,7 +815,7 @@ static void writeRecursive(FileStorage &fs, CvDTreeNode *node, int maxCatCount)
835 815 if (!hasChildren) // Write the leaf value
836 816 fs << "value" << node->value; // value of the node. Only relevant for leaf nodes
837 817 else { // Write the splitting information and then the children
838   - if (maxCatCount > 0) {
  818 + if (maxCatCount > 1) {
839 819 fs << "subset" << "[:";
840 820 for (int i = 0; i < ((maxCatCount + 31) / 32); i++)
841 821 fs << node->split->subset[i]; // subset to split on (categorical features)
... ... @@ -857,7 +837,7 @@ void CascadeBoostTree::write(FileStorage &fs)
857 837 writeRecursive(fs, root, ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount());
858 838 fs << "}";
859 839 }
860   -
  840 +/*
861 841 static void readRecursive(const FileNode &fn, CvDTreeNode *node, CvDTreeTrainData *data)
862 842 {
863 843 bool hasChildren = (int)fn["hasChildren"];
... ... @@ -897,67 +877,8 @@ void CascadeBoostTree::read(const FileNode &fn, CvBoost* _ensemble, CvDTreeTrain
897 877  
898 878 root = data->new_node(0, 0, 0, 0);
899 879 readRecursive(fn, root, data);
900   -}*/
901   -
902   -void CascadeBoostTree::write(FileStorage &fs)
903   -{
904   - int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount();
905   - int subsetN = (maxCatCount + 31)/32;
906   - queue<CvDTreeNode*> internalNodesQueue;
907   - int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth);
908   - Ptr<float> leafVals = new float[size];
909   - int leafValIdx = 0;
910   - int internalNodeIdx = 1;
911   - CvDTreeNode* tempNode;
912   -
913   - CV_DbgAssert( root );
914   - internalNodesQueue.push( root );
915   -
916   - fs << "{";
917   - fs << CC_INTERNAL_NODES << "[:";
918   - while (!internalNodesQueue.empty())
919   - {
920   - tempNode = internalNodesQueue.front();
921   - CV_Assert( tempNode->left );
922   - if ( !tempNode->left->left && !tempNode->left->right) // left node is leaf
923   - {
924   - leafVals[-leafValIdx] = (float)tempNode->left->value;
925   - fs << leafValIdx-- ;
926   - }
927   - else
928   - {
929   - internalNodesQueue.push( tempNode->left );
930   - fs << internalNodeIdx++;
931   - }
932   - CV_Assert( tempNode->right );
933   - if ( !tempNode->right->left && !tempNode->right->right) // right node is leaf
934   - {
935   - leafVals[-leafValIdx] = (float)tempNode->right->value;
936   - fs << leafValIdx--;
937   - }
938   - else
939   - {
940   - internalNodesQueue.push( tempNode->right );
941   - fs << internalNodeIdx++;
942   - }
943   - int fidx = tempNode->split->var_idx;
944   -
945   - fs << fidx;
946   - if ( !maxCatCount )
947   - fs << tempNode->split->ord.c;
948   - else
949   - for( int i = 0; i < subsetN; i++ )
950   - fs << tempNode->split->subset[i];
951   - internalNodesQueue.pop();
952   - }
953   - fs << "]"; // CC_INTERNAL_NODES
954   -
955   - fs << CC_LEAF_VALUES << "[:";
956   - for (int ni = 0; ni < -leafValIdx; ni++)
957   - fs << leafVals[ni];
958   - fs << "]"; // CC_LEAF_VALUES
959   - fs << "}";
960 880 }
  881 +*/
961 882  
962 883 void CascadeBoostTree::split_node_data( CvDTreeNode* node )
963 884 {
... ... @@ -1214,6 +1135,7 @@ bool CascadeBoost::train( const FeatureEvaluator* _featureEvaluator,
1214 1135 break;
1215 1136 }
1216 1137  
  1138 + classifiers.append(tree);
1217 1139 cvSeqPush( weak, &tree );
1218 1140 update_weights( tree );
1219 1141 trim_weights();
... ... @@ -1542,20 +1464,3 @@ bool CascadeBoost::isErrDesired()
1542 1464 return falseAlarm <= maxFalseAlarm;
1543 1465 }
1544 1466  
1545   -void CascadeBoost::write(FileStorage &fs) const
1546   -{
1547   -// char cmnt[30];
1548   - CascadeBoostTree* weakTree;
1549   - fs << CC_WEAK_COUNT << weak->total;
1550   - fs << CC_STAGE_THRESHOLD << threshold;
1551   - fs << CC_WEAK_CLASSIFIERS << "[";
1552   - for( int wi = 0; wi < weak->total; wi++)
1553   - {
1554   - /*sprintf( cmnt, "tree %i", wi );
1555   - cvWriteComment( fs, cmnt, 0 );*/
1556   - weakTree = *((CascadeBoostTree**) cvGetSeqElem( weak, wi ));
1557   - weakTree->write(fs);
1558   - }
1559   - fs << "]";
1560   -}
1561   -
... ...
openbr/core/boost.h
... ... @@ -55,7 +55,6 @@ struct FeatureEvaluator
55 55 ~FeatureEvaluator() {}
56 56 void init(Representation *_representation, int _maxSampleCount);
57 57 void setImage(const cv::Mat& img, uchar clsLabel, int idx);
58   - void writeFeatures(cv::FileStorage &fs, const cv::Mat& featureMap) const;
59 58 float operator()(int featureIdx, int sampleIdx) const { return representation->evaluate(data.row(sampleIdx), featureIdx); }
60 59  
61 60 int getNumFeatures() const { return representation->numFeatures(); }
... ... @@ -77,7 +76,6 @@ struct CascadeBoostParams : CvBoostParams
77 76 CascadeBoostParams(int _boostType, float _minHitRate, float _maxFalseAlarm,
78 77 double _weightTrimRate, int _maxDepth, int _maxWeakCount);
79 78 virtual ~CascadeBoostParams() {}
80   - void write( cv::FileStorage &fs ) const;
81 79 };
82 80  
83 81 struct CascadeBoostTrainData : CvDTreeTrainData
... ... @@ -128,13 +126,15 @@ public:
128 126 virtual float predict( int sampleIdx, bool returnSum = false ) const;
129 127  
130 128 float getThreshold() const { return threshold; }
131   - void write(cv::FileStorage &fs) const;
  129 + QList<CvBoostTree*> getClassifiers() const { return classifiers; }
132 130  
133 131 protected:
134 132 virtual bool set_params(const CvBoostParams& _params);
135 133 virtual void update_weights(CvBoostTree* tree);
136 134 virtual bool isErrDesired();
137 135  
  136 + QList<CvBoostTree*> classifiers;
  137 +
138 138 float threshold;
139 139 float minHitRate, maxFalseAlarm;
140 140 };
... ...
openbr/plugins/classification/boostedforest.cpp
... ... @@ -6,6 +6,64 @@ using namespace cv;
6 6 namespace br
7 7 {
8 8  
  9 +struct Node
  10 +{
  11 + Node() : left(NULL), right(NULL) {}
  12 +
  13 + float value;
  14 +
  15 + float threshold; // For ordered features
  16 + QList<int> subset; // For categorical features
  17 + int featureIdx;
  18 +
  19 + Node *left;
  20 + Node *right;
  21 +};
  22 +
  23 +static void buildTreeRecursive(Node *node, const CvDTreeNode *cv_node, int maxCatCount)
  24 +{
  25 + if (!cv_node->left) // Write the leaf value
  26 + node->value = cv_node->value; // value of the node. Only relevant for leaf nodes
  27 + else { // Write the splitting information and then the children
  28 + if (maxCatCount > 1)
  29 + for (int i = 0; i < ((maxCatCount + 31) / 32); i++)
  30 + node->subset.append(cv_node->split->subset[i]); // subset to split on (categorical features)
  31 + else
  32 + node->threshold = cv_node->split->ord.c; // threshold to split on (ordered features)
  33 +
  34 + node->featureIdx = cv_node->split->var_idx; // feature idx of node
  35 +
  36 + node->left = new Node;
  37 + buildTreeRecursive(node->left, cv_node->left, maxCatCount);
  38 + node->right = new Node;
  39 + buildTreeRecursive(node->right, cv_node->right, maxCatCount);
  40 + }
  41 +}
  42 +
  43 +static void writeRecursive(FileStorage &fs, const Node *node, int maxCatCount)
  44 +{
  45 + bool hasChildren = node->left ? true : false;
  46 + fs << "hasChildren" << hasChildren;
  47 +
  48 + if (!hasChildren) // Write the leaf value
  49 + fs << "value" << node->value; // value of the node. Only relevant for leaf nodes
  50 + else { // Write the splitting information and then the children
  51 + if (maxCatCount > 1) {
  52 + fs << "subset" << "[:";
  53 + for (int i = 0; i < ((maxCatCount + 31) / 32); i++)
  54 + fs << node->subset[i]; // subset to split on (categorical features)
  55 + fs << "]";
  56 + } else {
  57 + fs << "threshold" << node->threshold; // threshold to split on (ordered features)
  58 + }
  59 +
  60 + fs << "feature_idx" << node->featureIdx; // feature idx of node
  61 +
  62 + fs << "left" << "{"; writeRecursive(fs, node->left, maxCatCount); fs << "}"; // write left child
  63 + fs << "right" << "{"; writeRecursive(fs, node->right, maxCatCount); fs << "}"; // write right child
  64 + }
  65 +}
  66 +
9 67 class BoostedForestClassifier : public Classifier
10 68 {
11 69 Q_OBJECT
... ... @@ -24,27 +82,49 @@ class BoostedForestClassifier : public Classifier
24 82 BR_PROPERTY(int, maxDepth, 1)
25 83 BR_PROPERTY(int, maxWeakCount, 100)
26 84  
27   - CascadeBoost *boost;
28   - FeatureEvaluator *featureEvaluator;
  85 + QList<Node*> weakClassifiers;
  86 + float threshold;
29 87  
30 88 void train(const QList<Mat> &images, const QList<float> &labels)
31 89 {
32 90 CascadeBoostParams params(CvBoost::GENTLE, minTAR, maxFAR, trimRate, maxDepth, maxWeakCount);
33 91  
34   - featureEvaluator = new FeatureEvaluator;
35   - featureEvaluator->init(representation, images.size());
  92 + FeatureEvaluator featureEvaluator;
  93 + featureEvaluator.init(representation, images.size());
36 94  
37 95 for (int i = 0; i < images.size(); i++)
38   - featureEvaluator->setImage(images[i], labels[i], i);
  96 + featureEvaluator.setImage(images[i], labels[i], i);
  97 +
  98 + CascadeBoost boost;
  99 + boost.train(&featureEvaluator, images.size(), 2048, 2048, params);
  100 +
  101 + threshold = boost.getThreshold();
39 102  
40   - boost = new CascadeBoost;
41   - boost->train(featureEvaluator, images.size(), 1024, 1024, params);
  103 + foreach (const CvBoostTree *tree, boost.getClassifiers()) {
  104 + Node *root = new Node;
  105 + buildTreeRecursive(root, tree->get_root(), representation->maxCatCount());
  106 + weakClassifiers.append(root);
  107 + }
42 108 }
43 109  
44 110 float classify(const Mat &image) const
45 111 {
46   - featureEvaluator->setImage(image, 0, 0);
47   - return boost->predict(0);
  112 + float sum = 0;
  113 + foreach (const Node *root, weakClassifiers) {
  114 + const Node *node = root;
  115 +
  116 + while (node->left) {
  117 + if (representation->maxCatCount() > 1) {
  118 + int c = (int)representation->evaluate(image, node->featureIdx);
  119 + node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right;
  120 + } else {
  121 + float val = representation->evaluate(image, node->featureIdx);
  122 + node = val < node->threshold ? node->left : node->right;
  123 + }
  124 + }
  125 + sum += node->value;
  126 + }
  127 + return sum < threshold - FLT_EPSILON ? 0.0 : 1.0;
48 128 }
49 129  
50 130 int numFeatures() const
... ... @@ -64,7 +144,15 @@ class BoostedForestClassifier : public Classifier
64 144  
65 145 void write(FileStorage &fs) const
66 146 {
67   - boost->write(fs);
  147 + fs << "weakCount" << weakClassifiers.size();
  148 + fs << "stageThreshold" << threshold;
  149 + fs << "weakClassifiers" << "[";
  150 + foreach (const Node *root, weakClassifiers) {
  151 + fs << "{";
  152 + writeRecursive(fs, root, representation->maxCatCount());
  153 + fs << "}";
  154 + }
  155 + fs << "]";
68 156 }
69 157 };
70 158  
... ...
openbr/plugins/classification/cascade.cpp
... ... @@ -178,22 +178,9 @@ class CascadeClassifier : public Classifier
178 178  
179 179 void write(FileStorage &fs) const
180 180 {
181   - fs << CC_STAGE_TYPE << CC_BOOST;
182   - fs << CC_FEATURE_TYPE << CC_LBP;
183   - fs << CC_HEIGHT << 24;
184   - fs << CC_WIDTH << 24;
  181 + fs << "stageCount" << stages.size();
185 182  
186   - CascadeBoostParams stageParams(CvBoost::GINI, 0.999, 0.5, 0.95, 1, 200);
187   - fs << CC_STAGE_PARAMS << "{"; stageParams.write( fs ); fs << "}";
188   -
189   - fs << CC_FEATURE_PARAMS << "{";
190   - fs << CC_MAX_CAT_COUNT << stages.first()->maxCatCount();
191   - fs << CC_FEATURE_SIZE << 1;
192   - fs << "}";
193   -
194   - fs << CC_STAGE_NUM << stages.size();
195   -
196   - fs << CC_STAGES << "[";
  183 + fs << "stages" << "[";
197 184 foreach (const Classifier *stage, stages) {
198 185 fs << "{";
199 186 stage->write(fs);
... ...
openbr/plugins/representation/haar.cpp 0 โ†’ 100644
  1 +#include <opencv2/imgproc/imgproc.hpp>
  2 +
  3 +#include <openbr/plugins/openbr_internal.h>
  4 +#include <openbr/core/opencvutils.h>
  5 +
  6 +using namespace cv;
  7 +
  8 +namespace br
  9 +{
  10 +
// Compute the four flat-array offsets of the corners of `rect` in an
// integral image stored row-major with `step` elements per row.
// Corrected corner comments: p2 is the bottom-left corner (x, y + h),
// not (x + w, y) as the original comment claimed.
#define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step )                      \
    /* (x, y) */                                                          \
    (p0) = (rect).x + (step) * (rect).y;                                  \
    /* (x + w, y) */                                                      \
    (p1) = (rect).x + (rect).width + (step) * (rect).y;                   \
    /* (x, y + h) */                                                      \
    (p2) = (rect).x + (step) * ((rect).y + (rect).height);                \
    /* (x + w, y + h) */                                                  \
    (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height);
  20 +
// Haar-like rectangle features in the style of the Viola-Jones detector,
// evaluated on an integral image. init() exhaustively enumerates every
// two- and three-rectangle feature that fits inside the detection window.
class HaarRepresentation : public Representation
{
    Q_OBJECT

    Q_PROPERTY(int winWidth READ get_winWidth WRITE set_winWidth RESET reset_winWidth STORED false)
    Q_PROPERTY(int winHeight READ get_winHeight WRITE set_winHeight RESET reset_winHeight STORED false)
    BR_PROPERTY(int, winWidth, 24)  // detection window width in pixels
    BR_PROPERTY(int, winHeight, 24) // detection window height in pixels

    // Enumerate all Haar features for the window. For each anchor (x, y) and
    // each rectangle size (dx, dy), append every feature variant that still
    // fits inside the window.
    void init()
    {
        // Row stride of the integral image: one column wider than the window.
        int offset = winWidth + 1;
        for (int x = 0; x < winWidth; x++) {
            for (int y = 0; y < winHeight; y++) {
                for (int dx = 1; dx <= winWidth; dx++) {
                    for (int dy = 1; dy <= winHeight; dy++) {
                        // haar_x2: two horizontally adjacent rectangles
                        if ((x+dx*2 <= winWidth) && (y+dy <= winHeight))
                            features.append(Feature(offset,
                                x, y, dx*2, dy, -1,
                                x+dx, y, dx , dy, +2));
                        // haar_y2: two vertically adjacent rectangles
                        if ((x+dx <= winWidth) && (y+dy*2 <= winHeight))
                            features.append(Feature(offset,
                                x, y, dx, dy*2, -1,
                                x, y+dy, dx, dy, +2));
                        // haar_x3: three horizontally adjacent rectangles
                        if ((x+dx*3 <= winWidth) && (y+dy <= winHeight))
                            features.append(Feature(offset,
                                x, y, dx*3, dy, -1,
                                x+dx, y, dx , dy, +3));
                        // haar_y3: three vertically adjacent rectangles
                        if ((x+dx <= winWidth) && (y+dy*3 <= winHeight))
                            features.append(Feature(offset,
                                x, y, dx, dy*3, -1,
                                x, y+dy, dx, dy, +3));
                        // x2_y2: checkerboard of four rectangles
                        if ((x+dx*2 <= winWidth) && (y+dy*2 <= winHeight))
                            features.append(Feature(offset,
                                x, y, dx*2, dy*2, -1,
                                x, y, dx, dy, +2,
                                x+dx, y+dy, dx, dy, +2));


                    }
                }
            }
        }
    }

    // Convert a raw window into the integral image all features are
    // evaluated against.
    void preprocess(const Mat &src, Mat &dst) const
    {
        integral(src, dst);
    }

    // Evaluate a single feature. `image` is expected to be the integral
    // image produced by preprocess() — the precomputed offsets assume its
    // (winWidth + 1) row stride.
    float evaluate(const Mat &image, int idx) const
    {
        return (float)features[idx].calc(image);
    }

    // Evaluate a list of features (or all features when `indices` is empty)
    // into a 1 x N float row vector.
    Mat evaluate(const Mat &image, const QList<int> &indices) const
    {
        int size = indices.empty() ? numFeatures() : indices.size();

        Mat result(1, size, CV_32FC1);
        for (int i = 0; i < size; i++)
            result.at<float>(i) = evaluate(image, indices.empty() ? i : indices[i]);
        return result;
    }

    int numFeatures() const { return features.size(); }
    Size preWindowSize() const { return Size(winWidth, winHeight); }
    // Integral image is one pixel larger in each dimension than the window.
    Size postWindowSize() const { return Size(winWidth + 1, winHeight + 1); }
    // Haar responses are ordered (continuous) values, not categorical.
    int maxCatCount() const { return 0; }

    // One Haar feature: up to three weighted rectangles plus their
    // precomputed integral-image corner offsets for fast evaluation.
    struct Feature
    {
        Feature();
        Feature( int offset,
                 int x0, int y0, int w0, int h0, float wt0,
                 int x1, int y1, int w1, int h1, float wt1,
                 int x2 = 0, int y2 = 0, int w2 = 0, int h2 = 0, float wt2 = 0.0F );
        float calc(const Mat &img) const;

        struct {
            Rect r;
            float weight; // 0 marks an unused rectangle slot
        } rect[3];

        // Flat-array offsets of each rectangle's four corners.
        struct {
            int p0, p1, p2, p3;
        } fastRect[3];
    };

    QList<Feature> features;
};
  117 +
// Register HaarRepresentation with the Representation plugin factory.
BR_REGISTER(Representation, HaarRepresentation)
  119 +
  120 +HaarRepresentation::Feature::Feature()
  121 +{
  122 + rect[0].r = rect[1].r = rect[2].r = Rect(0,0,0,0);
  123 + rect[0].weight = rect[1].weight = rect[2].weight = 0;
  124 +}
  125 +
  126 +HaarRepresentation::Feature::Feature(int offset,
  127 + int x0, int y0, int w0, int h0, float wt0,
  128 + int x1, int y1, int w1, int h1, float wt1,
  129 + int x2, int y2, int w2, int h2, float wt2)
  130 +{
  131 + rect[0].r.x = x0;
  132 + rect[0].r.y = y0;
  133 + rect[0].r.width = w0;
  134 + rect[0].r.height = h0;
  135 + rect[0].weight = wt0;
  136 +
  137 + rect[1].r.x = x1;
  138 + rect[1].r.y = y1;
  139 + rect[1].r.width = w1;
  140 + rect[1].r.height = h1;
  141 + rect[1].weight = wt1;
  142 +
  143 + rect[2].r.x = x2;
  144 + rect[2].r.y = y2;
  145 + rect[2].r.width = w2;
  146 + rect[2].r.height = h2;
  147 + rect[2].weight = wt2;
  148 +
  149 + for (int j = 0; j < 3; j++) {
  150 + if( rect[j].weight == 0.0F )
  151 + break;
  152 + CV_SUM_OFFSETS(fastRect[j].p0, fastRect[j].p1, fastRect[j].p2, fastRect[j].p3, rect[j].r, offset)
  153 + }
  154 +}
  155 +
  156 +inline float HaarRepresentation::Feature::calc(const Mat &img) const
  157 +{
  158 + const int* ptr = img.ptr<int>();
  159 + float ret = rect[0].weight * (ptr[fastRect[0].p0] - ptr[fastRect[0].p1] - ptr[fastRect[0].p2] + ptr[fastRect[0].p3]) +
  160 + rect[1].weight * (ptr[fastRect[1].p0] - ptr[fastRect[1].p1] - ptr[fastRect[1].p2] + ptr[fastRect[1].p3]);
  161 + if (rect[2].weight != 0.0f)
  162 + ret += rect[2].weight * (ptr[fastRect[2].p0] - ptr[fastRect[2].p1] - ptr[fastRect[2].p2] + ptr[fastRect[2].p3]);
  163 + return ret;
  164 +}
  165 +
  166 +} // namespace br
  167 +
  168 +#include "representation/haar.moc"
  169 +
... ...
openbr/plugins/representation/mblbp.cpp
... ... @@ -58,7 +58,6 @@ class MBLBPRepresentation : public Representation
58 58 return result;
59 59 }
60 60  
61   - void write(FileStorage &fs, const Mat &featureMap);
62 61 int numFeatures() const { return features.size(); }
63 62 Size preWindowSize() const { return Size(winWidth, winHeight); }
64 63 Size postWindowSize() const { return Size(winWidth + 1, winHeight + 1); }
... ... @@ -79,20 +78,6 @@ class MBLBPRepresentation : public Representation
79 78  
80 79 BR_REGISTER(Representation, MBLBPRepresentation)
81 80  
82   -void MBLBPRepresentation::write(FileStorage &fs, const Mat &featureMap)
83   -{
84   - fs << "features" << "[";
85   - const Mat_<int>& featureMap_ = (const Mat_<int>&)featureMap;
86   - for ( int fi = 0; fi < featureMap.cols; fi++ )
87   - if ( featureMap_(0, fi) >= 0 )
88   - {
89   - fs << "{";
90   - features[fi].write( fs );
91   - fs << "}";
92   - }
93   - fs << "]";
94   -}
95   -
96 81 MBLBPRepresentation::Feature::Feature( int offset, int x, int y, int _blockWidth, int _blockHeight )
97 82 {
98 83 Rect tr = rect = cvRect(x, y, _blockWidth, _blockHeight);
... ...