Commit fb1d5e05cacce2468c930192d4c7805280315874
1 parent: 9f01003d
OpenBR serialization
Showing 7 changed files with 51 additions and 487 deletions
openbr/core/boost.cpp
| @@ -155,8 +155,8 @@ void FeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx) | @@ -155,8 +155,8 @@ void FeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx) | ||
| 155 | 155 | ||
| 156 | int dx, dy; | 156 | int dx, dy; |
| 157 | Size windowSize = representation->windowSize(&dx, &dy); | 157 | Size windowSize = representation->windowSize(&dx, &dy); |
| 158 | - Mat integralImg(Size(windowSize.width + dx, windowSize.height + dy), data.type(), data.ptr<int>(idx)); | ||
| 159 | - representation->preprocess(img, integralImg); | 158 | + Mat pp(Size(windowSize.width + dx, windowSize.height + dy), data.type(), data.ptr<int>(idx)); |
| 159 | + representation->preprocess(img, pp); | ||
| 160 | } | 160 | } |
| 161 | 161 | ||
| 162 | //----------------------------- CascadeBoostParams ------------------------------------------------- | 162 | //----------------------------- CascadeBoostParams ------------------------------------------------- |
| @@ -778,7 +778,7 @@ void CascadeBoostTrainData::precalculate() | @@ -778,7 +778,7 @@ void CascadeBoostTrainData::precalculate() | ||
| 778 | parallel_for_( Range(0, minNum), | 778 | parallel_for_( Range(0, minNum), |
| 779 | FeatureValAndIdxPrecalc(featureEvaluator, buf, &valCache, sample_count, is_buf_16u!=0) ); | 779 | FeatureValAndIdxPrecalc(featureEvaluator, buf, &valCache, sample_count, is_buf_16u!=0) ); |
| 780 | parallel_for_( Range(minNum, numPrecalcVal), | 780 | parallel_for_( Range(minNum, numPrecalcVal), |
| 781 | - FeatureValOnlyPrecalc(featureEvaluator, &valCache, sample_count) ); | 781 | + FeatureValOnlyPrecalc(featureEvaluator, &valCache, sample_count) ); |
| 782 | cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl; | 782 | cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl; |
| 783 | } | 783 | } |
| 784 | 784 | ||
| @@ -811,49 +811,6 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const | @@ -811,49 +811,6 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const | ||
| 811 | return node; | 811 | return node; |
| 812 | } | 812 | } |
| 813 | 813 | ||
| 814 | -/* | ||
| 815 | -static void readRecursive(const FileNode &fn, CvDTreeNode *node, CvDTreeTrainData *data) | ||
| 816 | -{ | ||
| 817 | - bool hasChildren = (int)fn["hasChildren"]; | ||
| 818 | - | ||
| 819 | - if (!hasChildren) | ||
| 820 | - node->value = (float)fn["value"]; | ||
| 821 | - else { | ||
| 822 | - int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount(); | ||
| 823 | - if (maxCatCount > 0) { | ||
| 824 | - node->split = data->new_split_cat(0, 0); | ||
| 825 | - FileNode subset_node = fn["subset"]; FileNodeIterator subset_it = subset_node.begin(); | ||
| 826 | - for (int i = 0; i < (maxCatCount + 31) / 32; i++, ++subset_it) | ||
| 827 | - node->split->subset[i] = (int)*subset_it; | ||
| 828 | - } else { | ||
| 829 | - float threshold = (float)fn["threshold"]; | ||
| 830 | - node->split = data->new_split_ord(0, threshold, 0, 0, 0); | ||
| 831 | - } | ||
| 832 | - | ||
| 833 | - node->split->var_idx = (int)fn["feature_idx"]; | ||
| 834 | - | ||
| 835 | - CvDTreeNode *leftChild = data->new_node(node, 0, 0, 0); | ||
| 836 | - node->left = leftChild; | ||
| 837 | - readRecursive(fn["left"], leftChild, data); | ||
| 838 | - | ||
| 839 | - CvDTreeNode *rightChild = data->new_node(node, 0, 0, 0); | ||
| 840 | - node->right = rightChild; | ||
| 841 | - readRecursive(fn["right"], rightChild, data); | ||
| 842 | - } | ||
| 843 | -} | ||
| 844 | - | ||
| 845 | -void CascadeBoostTree::read(const FileNode &fn, CvBoost* _ensemble, CvDTreeTrainData* _data) | ||
| 846 | -{ | ||
| 847 | - clear(); | ||
| 848 | - data = _data; | ||
| 849 | - ensemble = _ensemble; | ||
| 850 | - pruned_tree_idx = 0; | ||
| 851 | - | ||
| 852 | - root = data->new_node(0, 0, 0, 0); | ||
| 853 | - readRecursive(fn, root, data); | ||
| 854 | -} | ||
| 855 | -*/ | ||
| 856 | - | ||
| 857 | void CascadeBoostTree::split_node_data( CvDTreeNode* node ) | 814 | void CascadeBoostTree::split_node_data( CvDTreeNode* node ) |
| 858 | { | 815 | { |
| 859 | int n = node->sample_count, nl, nr, scount = data->sample_count; | 816 | int n = node->sample_count, nl, nr, scount = data->sample_count; |
openbr/core/cascade.cpp deleted
| 1 | -#include "cascade.h" | ||
| 2 | - | ||
| 3 | -using namespace br; | ||
| 4 | - | ||
| 5 | -void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights) | ||
| 6 | -{ | ||
| 7 | - if( groupThreshold <= 0 || rectList.empty() ) | ||
| 8 | - { | ||
| 9 | - if( weights ) | ||
| 10 | - { | ||
| 11 | - size_t i, sz = rectList.size(); | ||
| 12 | - weights->resize(sz); | ||
| 13 | - for( i = 0; i < sz; i++ ) | ||
| 14 | - (*weights)[i] = 1; | ||
| 15 | - } | ||
| 16 | - return; | ||
| 17 | - } | ||
| 18 | - | ||
| 19 | - vector<int> labels; | ||
| 20 | - int nclasses = partition(rectList, labels, SimilarRects(eps)); | ||
| 21 | - | ||
| 22 | - vector<Rect> rrects(nclasses); | ||
| 23 | - vector<int> rweights(nclasses, 0); | ||
| 24 | - vector<int> rejectLevels(nclasses, 0); | ||
| 25 | - vector<double> rejectWeights(nclasses, DBL_MIN); | ||
| 26 | - int i, j, nlabels = (int)labels.size(); | ||
| 27 | - for( i = 0; i < nlabels; i++ ) | ||
| 28 | - { | ||
| 29 | - int cls = labels[i]; | ||
| 30 | - rrects[cls].x += rectList[i].x; | ||
| 31 | - rrects[cls].y += rectList[i].y; | ||
| 32 | - rrects[cls].width += rectList[i].width; | ||
| 33 | - rrects[cls].height += rectList[i].height; | ||
| 34 | - rweights[cls]++; | ||
| 35 | - } | ||
| 36 | - if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() ) | ||
| 37 | - { | ||
| 38 | - for( i = 0; i < nlabels; i++ ) | ||
| 39 | - { | ||
| 40 | - int cls = labels[i]; | ||
| 41 | - if( (*weights)[i] > rejectLevels[cls] ) | ||
| 42 | - { | ||
| 43 | - rejectLevels[cls] = (*weights)[i]; | ||
| 44 | - rejectWeights[cls] = (*levelWeights)[i]; | ||
| 45 | - } | ||
| 46 | - else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) ) | ||
| 47 | - rejectWeights[cls] = (*levelWeights)[i]; | ||
| 48 | - } | ||
| 49 | - } | ||
| 50 | - | ||
| 51 | - for( i = 0; i < nclasses; i++ ) | ||
| 52 | - { | ||
| 53 | - Rect r = rrects[i]; | ||
| 54 | - float s = 1.f/rweights[i]; | ||
| 55 | - rrects[i] = Rect(saturate_cast<int>(r.x*s), | ||
| 56 | - saturate_cast<int>(r.y*s), | ||
| 57 | - saturate_cast<int>(r.width*s), | ||
| 58 | - saturate_cast<int>(r.height*s)); | ||
| 59 | - } | ||
| 60 | - | ||
| 61 | - rectList.clear(); | ||
| 62 | - if( weights ) | ||
| 63 | - weights->clear(); | ||
| 64 | - if( levelWeights ) | ||
| 65 | - levelWeights->clear(); | ||
| 66 | - | ||
| 67 | - for( i = 0; i < nclasses; i++ ) | ||
| 68 | - { | ||
| 69 | - Rect r1 = rrects[i]; | ||
| 70 | - int n1 = levelWeights ? rejectLevels[i] : rweights[i]; | ||
| 71 | - | ||
| 72 | - double w1 = rejectWeights[i]; | ||
| 73 | - if( n1 <= groupThreshold ) | ||
| 74 | - continue; | ||
| 75 | - // filter out small face rectangles inside large rectangles | ||
| 76 | - for( j = 0; j < nclasses; j++ ) | ||
| 77 | - { | ||
| 78 | - int n2 = rweights[j]; | ||
| 79 | - | ||
| 80 | - if( j == i || n2 <= groupThreshold ) | ||
| 81 | - continue; | ||
| 82 | - Rect r2 = rrects[j]; | ||
| 83 | - | ||
| 84 | - int dx = saturate_cast<int>( r2.width * eps ); | ||
| 85 | - int dy = saturate_cast<int>( r2.height * eps ); | ||
| 86 | - | ||
| 87 | - if( i != j && | ||
| 88 | - r1.x >= r2.x - dx && | ||
| 89 | - r1.y >= r2.y - dy && | ||
| 90 | - r1.x + r1.width <= r2.x + r2.width + dx && | ||
| 91 | - r1.y + r1.height <= r2.y + r2.height + dy && | ||
| 92 | - (n2 > std::max(3, n1) || n1 < 3) ) | ||
| 93 | - break; | ||
| 94 | - } | ||
| 95 | - | ||
| 96 | - if( j == nclasses ) | ||
| 97 | - { | ||
| 98 | - rectList.push_back(r1); | ||
| 99 | - if( weights ) | ||
| 100 | - weights->push_back(n1); | ||
| 101 | - if( levelWeights ) | ||
| 102 | - levelWeights->push_back(w1); | ||
| 103 | - } | ||
| 104 | - } | ||
| 105 | -} | ||
| 106 | - | ||
| 107 | -void br::groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps) | ||
| 108 | -{ | ||
| 109 | - groupRectangles(rectList, groupThreshold, eps, 0, 0); | ||
| 110 | -} | ||
| 111 | - | ||
| 112 | -void br::groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps) | ||
| 113 | -{ | ||
| 114 | - groupRectangles(rectList, groupThreshold, eps, &weights, 0); | ||
| 115 | -} | ||
| 116 | - | ||
| 117 | -void br::groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps) | ||
| 118 | -{ | ||
| 119 | - groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights); | ||
| 120 | -} | ||
| 121 | - | ||
| 122 | -// --------------------------------- Cascade Classifier ---------------------------------- | ||
| 123 | - | ||
| 124 | -static void loadRecursive(const FileNode &fn, _CascadeClassifier::Node *node, int maxCatCount) | ||
| 125 | -{ | ||
| 126 | - bool hasChildren = (int)fn["hasChildren"]; | ||
| 127 | - if (!hasChildren) | ||
| 128 | - node->value = (float)fn["value"]; | ||
| 129 | - else { | ||
| 130 | - if (maxCatCount > 0) { | ||
| 131 | - FileNode subset_fn = fn["subset"]; | ||
| 132 | - for (FileNodeIterator subset_it = subset_fn.begin(); subset_it != subset_fn.end(); ++subset_it) | ||
| 133 | - node->subset.append((int)*subset_it); | ||
| 134 | - } else { | ||
| 135 | - node->threshold = (float)fn["threshold"]; | ||
| 136 | - } | ||
| 137 | - | ||
| 138 | - node->featureIdx = (int)fn["featureIdx"]; | ||
| 139 | - | ||
| 140 | - node->left = new _CascadeClassifier::Node; node->right = new _CascadeClassifier::Node; | ||
| 141 | - loadRecursive(fn["left"], node->left, maxCatCount); | ||
| 142 | - loadRecursive(fn["right"], node->right, maxCatCount); | ||
| 143 | - } | ||
| 144 | -} | ||
| 145 | - | ||
| 146 | -bool _CascadeClassifier::load(const string& filename) | ||
| 147 | -{ | ||
| 148 | - FileStorage fs(filename, FileStorage::READ); | ||
| 149 | - if (!fs.isOpened()) | ||
| 150 | - return false; | ||
| 151 | - | ||
| 152 | - FileNode root = fs.getFirstTopLevelNode(); | ||
| 153 | - | ||
| 154 | - const float THRESHOLD_EPS = 1e-5; | ||
| 155 | - | ||
| 156 | - int maxCatCount = representation->maxCatCount(); | ||
| 157 | - | ||
| 158 | - // load stages | ||
| 159 | - FileNode stages_fn = root["stages"]; | ||
| 160 | - if( stages_fn.empty() ) | ||
| 161 | - return false; | ||
| 162 | - | ||
| 163 | - for (FileNodeIterator stage_it = stages_fn.begin(); stage_it != stages_fn.end(); ++stage_it) { | ||
| 164 | - FileNode stage_fn = *stage_it; | ||
| 165 | - | ||
| 166 | - Stage stage; | ||
| 167 | - stage.threshold = (float)stage_fn["stageThreshold"] - THRESHOLD_EPS; | ||
| 168 | - | ||
| 169 | - FileNode nodes_fn = stage_fn["weakClassifiers"]; | ||
| 170 | - if(nodes_fn.empty()) | ||
| 171 | - return false; | ||
| 172 | - | ||
| 173 | - for (FileNodeIterator node_it = nodes_fn.begin(); node_it != nodes_fn.end(); ++node_it) { | ||
| 174 | - FileNode node_fn = *node_it; | ||
| 175 | - | ||
| 176 | - Node *root = new Node; | ||
| 177 | - loadRecursive(node_fn, root, maxCatCount); | ||
| 178 | - | ||
| 179 | - stage.trees.append(root); | ||
| 180 | - } | ||
| 181 | - | ||
| 182 | - stages.append(stage); | ||
| 183 | - } | ||
| 184 | - | ||
| 185 | - return true; | ||
| 186 | -} | ||
| 187 | - | ||
| 188 | -int _CascadeClassifier::predict(const Mat &image, double &sum) const | ||
| 189 | -{ | ||
| 190 | - for (int stageIdx = 0; stageIdx < stages.size(); stageIdx++) { | ||
| 191 | - Stage stage = stages[stageIdx]; | ||
| 192 | - sum = 0; | ||
| 193 | - | ||
| 194 | - for (int treeIdx = 0; treeIdx < stage.trees.size(); treeIdx++) { | ||
| 195 | - Node *node = stage.trees[treeIdx]; | ||
| 196 | - | ||
| 197 | - while (node->left) { | ||
| 198 | - if (representation->maxCatCount() > 1) { | ||
| 199 | - int c = (int)representation->evaluate(image, node->featureIdx); | ||
| 200 | - node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right; | ||
| 201 | - } else { | ||
| 202 | - double val = representation->evaluate(image, node->featureIdx); | ||
| 203 | - node = val < node->threshold ? node->left : node->right; | ||
| 204 | - } | ||
| 205 | - } | ||
| 206 | - sum += node->value; | ||
| 207 | - } | ||
| 208 | - | ||
| 209 | - if (sum < stage.threshold) | ||
| 210 | - return stageIdx; | ||
| 211 | - } | ||
| 212 | - | ||
| 213 | - return stages.size(); | ||
| 214 | -} | ||
| 215 | - | ||
| 216 | -void _CascadeClassifier::detectMultiScale(const Mat& image, vector<Rect>& objects, vector<int>& rejectLevels, | ||
| 217 | - vector<double>& levelWeights, | ||
| 218 | - double scaleFactor, int minNeighbors, | ||
| 219 | - Size minSize, Size maxSize) const | ||
| 220 | -{ | ||
| 221 | - const double GROUP_EPS = 0.2; | ||
| 222 | - | ||
| 223 | - CV_Assert( scaleFactor > 1 && image.depth() == CV_8U ); | ||
| 224 | - | ||
| 225 | - if (stages.empty()) | ||
| 226 | - return; | ||
| 227 | - | ||
| 228 | - if( maxSize.height == 0 || maxSize.width == 0 ) | ||
| 229 | - maxSize = image.size(); | ||
| 230 | - | ||
| 231 | - Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U); | ||
| 232 | - | ||
| 233 | - for (double factor = 1; ; factor *= scaleFactor) { | ||
| 234 | - int dx, dy; | ||
| 235 | - Size originalWindowSize = representation->windowSize(&dx, &dy); | ||
| 236 | - | ||
| 237 | - Size windowSize(cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) ); | ||
| 238 | - Size scaledImageSize(cvRound(image.cols/factor ), cvRound(image.rows/factor)); | ||
| 239 | - Size processingRectSize(scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height); | ||
| 240 | - | ||
| 241 | - if (processingRectSize.width <= 0 || processingRectSize.height <= 0) | ||
| 242 | - break; | ||
| 243 | - if (windowSize.width > maxSize.width || windowSize.height > maxSize.height) | ||
| 244 | - break; | ||
| 245 | - if (windowSize.width < minSize.width || windowSize.height < minSize.height) | ||
| 246 | - continue; | ||
| 247 | - | ||
| 248 | - Mat scaledImage(scaledImageSize, CV_8U, imageBuffer.data); | ||
| 249 | - resize(image, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR); | ||
| 250 | - | ||
| 251 | - Mat repImage; | ||
| 252 | - representation->preprocess(scaledImage, repImage); | ||
| 253 | - | ||
| 254 | - int yStep = factor > 2. ? 1 : 2; | ||
| 255 | - for (int y = 0; y < processingRectSize.height; y += yStep) { | ||
| 256 | - for (int x = 0; x < processingRectSize.width; x += yStep) { | ||
| 257 | - Mat window = repImage(Rect(Point(x, y), Size(originalWindowSize.width + dx, originalWindowSize.height + dy))).clone(); | ||
| 258 | - | ||
| 259 | - double gypWeight; | ||
| 260 | - int result = predict(window, gypWeight); | ||
| 261 | - | ||
| 262 | - if (stages.size() - result < 4) { | ||
| 263 | - objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor), windowSize.width, windowSize.height)); | ||
| 264 | - rejectLevels.push_back(result); | ||
| 265 | - levelWeights.push_back(gypWeight); | ||
| 266 | - } | ||
| 267 | - | ||
| 268 | - if (result == 0) | ||
| 269 | - x += yStep; | ||
| 270 | - } | ||
| 271 | - } | ||
| 272 | - } | ||
| 273 | - | ||
| 274 | - groupRectangles(objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS); | ||
| 275 | -} |
openbr/core/cascade.h deleted
| 1 | -#ifndef CASCADE_H | ||
| 2 | -#define CASCADE_H | ||
| 3 | - | ||
| 4 | -#include <openbr/openbr_plugin.h> | ||
| 5 | -#include <opencv2/imgproc/imgproc.hpp> | ||
| 6 | - | ||
| 7 | -using namespace std; | ||
| 8 | -using namespace cv; | ||
| 9 | - | ||
| 10 | -namespace br | ||
| 11 | -{ | ||
| 12 | - | ||
| 13 | -// class for grouping object candidates, detected by Cascade Classifier, HOG etc. | ||
| 14 | -// instance of the class is to be passed to cv::partition (see cxoperations.hpp) | ||
| 15 | -class SimilarRects | ||
| 16 | -{ | ||
| 17 | -public: | ||
| 18 | - SimilarRects(double _eps) : eps(_eps) {} | ||
| 19 | - inline bool operator()(const Rect& r1, const Rect& r2) const | ||
| 20 | - { | ||
| 21 | - double delta = eps*(std::min(r1.width, r2.width) + std::min(r1.height, r2.height))*0.5; | ||
| 22 | - return std::abs(r1.x - r2.x) <= delta && | ||
| 23 | - std::abs(r1.y - r2.y) <= delta && | ||
| 24 | - std::abs(r1.x + r1.width - r2.x - r2.width) <= delta && | ||
| 25 | - std::abs(r1.y + r1.height - r2.y - r2.height) <= delta; | ||
| 26 | - } | ||
| 27 | - double eps; | ||
| 28 | -}; | ||
| 29 | - | ||
| 30 | -void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps=0.2); | ||
| 31 | -void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps=0.2); | ||
| 32 | -void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights ); | ||
| 33 | -void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps=0.2); | ||
| 34 | - | ||
| 35 | -class _CascadeClassifier | ||
| 36 | -{ | ||
| 37 | -public: | ||
| 38 | - _CascadeClassifier() : representation(Representation::make("MBLBP(24,24)", NULL)) {} | ||
| 39 | - _CascadeClassifier(const string& filename) : representation(Representation::make("MBLBP(24,24)", NULL)) { load(filename); } | ||
| 40 | - ~_CascadeClassifier() {} | ||
| 41 | - | ||
| 42 | - bool load(const string& filename); | ||
| 43 | - | ||
| 44 | - void detectMultiScale(const Mat& image, | ||
| 45 | - vector<Rect>& objects, | ||
| 46 | - vector<int>& rejectLevels, | ||
| 47 | - vector<double>& levelWeights, | ||
| 48 | - double scaleFactor=1.1, | ||
| 49 | - int minNeighbors=3, | ||
| 50 | - Size minSize=Size(), | ||
| 51 | - Size maxSize=Size()) const; | ||
| 52 | - | ||
| 53 | - int predict(const Mat &image, double &weight) const; | ||
| 54 | - | ||
| 55 | - struct Node | ||
| 56 | - { | ||
| 57 | - Node() : left(NULL), right(NULL) {} | ||
| 58 | - | ||
| 59 | - int featureIdx; | ||
| 60 | - float threshold; // for ordered features only | ||
| 61 | - QList<int> subset; // for categorical features only | ||
| 62 | - float value; // for leaf nodes only | ||
| 63 | - Node *left; | ||
| 64 | - Node *right; | ||
| 65 | - }; | ||
| 66 | - | ||
| 67 | - struct Stage | ||
| 68 | - { | ||
| 69 | - QList<Node*> trees; | ||
| 70 | - float threshold; | ||
| 71 | - }; | ||
| 72 | - | ||
| 73 | - QList<Stage> stages; | ||
| 74 | - Representation *representation; | ||
| 75 | -}; | ||
| 76 | - | ||
| 77 | -} // namespace br | ||
| 78 | - | ||
| 79 | -#endif // CASCADE_H |
openbr/openbr_plugin.h
| @@ -1424,15 +1424,11 @@ public: | @@ -1424,15 +1424,11 @@ public: | ||
| 1424 | virtual void train(const QList<cv::Mat> &images, const QList<float> &labels) = 0; | 1424 | virtual void train(const QList<cv::Mat> &images, const QList<float> &labels) = 0; |
| 1425 | virtual float classify(const cv::Mat &image, bool process = true, float *confidence = NULL) const = 0; | 1425 | virtual float classify(const cv::Mat &image, bool process = true, float *confidence = NULL) const = 0; |
| 1426 | 1426 | ||
| 1427 | - // Slots for representation | 1427 | + // Slots for representations |
| 1428 | virtual cv::Mat preprocess(const cv::Mat &image) const = 0; | 1428 | virtual cv::Mat preprocess(const cv::Mat &image) const = 0; |
| 1429 | virtual cv::Size windowSize(int *dx = NULL, int *dy = NULL) const = 0; | 1429 | virtual cv::Size windowSize(int *dx = NULL, int *dy = NULL) const = 0; |
| 1430 | - | ||
| 1431 | - // OpenCV compatibility | ||
| 1432 | virtual int numFeatures() const = 0; | 1430 | virtual int numFeatures() const = 0; |
| 1433 | virtual int maxCatCount() const = 0; | 1431 | virtual int maxCatCount() const = 0; |
| 1434 | - virtual void write(cv::FileStorage &fs) const { (void)fs; } | ||
| 1435 | - virtual void read(const cv::FileNode &node) { (void)node; } | ||
| 1436 | }; | 1432 | }; |
| 1437 | 1433 | ||
| 1438 | /*! | 1434 | /*! |
openbr/plugins/classification/boostedforest.cpp
| @@ -39,50 +39,47 @@ static void buildTreeRecursive(Node *node, const CvDTreeNode *cv_node, int maxCa | @@ -39,50 +39,47 @@ static void buildTreeRecursive(Node *node, const CvDTreeNode *cv_node, int maxCa | ||
| 39 | } | 39 | } |
| 40 | } | 40 | } |
| 41 | 41 | ||
| 42 | -static void readRecursive(const FileNode &fn, Node *node, int maxCatCount) | 42 | +static void loadRecursive(QDataStream &stream, Node *node, int maxCatCount) |
| 43 | { | 43 | { |
| 44 | - bool hasChildren = (int)fn["hasChildren"]; | 44 | + bool hasChildren; stream >> hasChildren; |
| 45 | + | ||
| 45 | if (!hasChildren) { | 46 | if (!hasChildren) { |
| 46 | - node->value = (float)fn["value"]; | 47 | + stream >> node->value; |
| 47 | node->left = node->right = NULL; | 48 | node->left = node->right = NULL; |
| 48 | } else { | 49 | } else { |
| 49 | - if (maxCatCount > 0) { | ||
| 50 | - FileNode subset_fn = fn["subset"]; | ||
| 51 | - for (FileNodeIterator subset_it = subset_fn.begin(); subset_it != subset_fn.end(); ++subset_it) | ||
| 52 | - node->subset.append((int)*subset_it); | ||
| 53 | - } else { | ||
| 54 | - node->threshold = (float)fn["threshold"]; | ||
| 55 | - } | 50 | + if (maxCatCount > 0) |
| 51 | + for (int i = 0; i < (maxCatCount + 31)/32; i++) { | ||
| 52 | + int s; stream >> s; node->subset.append(s); | ||
| 53 | + } | ||
| 54 | + else | ||
| 55 | + stream >> node->threshold; | ||
| 56 | 56 | ||
| 57 | - node->featureIdx = (int)fn["featureIdx"]; | 57 | + stream >> node->featureIdx; |
| 58 | 58 | ||
| 59 | node->left = new Node; node->right = new Node; | 59 | node->left = new Node; node->right = new Node; |
| 60 | - readRecursive(fn["left"], node->left, maxCatCount); | ||
| 61 | - readRecursive(fn["right"], node->right, maxCatCount); | 60 | + loadRecursive(stream, node->left, maxCatCount); |
| 61 | + loadRecursive(stream, node->right, maxCatCount); | ||
| 62 | } | 62 | } |
| 63 | } | 63 | } |
| 64 | 64 | ||
| 65 | -static void writeRecursive(FileStorage &fs, const Node *node, int maxCatCount) | 65 | +static void storeRecursive(QDataStream &stream, const Node *node, int maxCatCount) |
| 66 | { | 66 | { |
| 67 | bool hasChildren = node->left ? true : false; | 67 | bool hasChildren = node->left ? true : false; |
| 68 | - fs << "hasChildren" << hasChildren; | 68 | + stream << hasChildren; |
| 69 | 69 | ||
| 70 | if (!hasChildren) | 70 | if (!hasChildren) |
| 71 | - fs << "value" << node->value; | 71 | + stream << node->value; |
| 72 | else { | 72 | else { |
| 73 | - if (maxCatCount > 0) { | ||
| 74 | - fs << "subset" << "["; | 73 | + if (maxCatCount > 0) |
| 75 | for (int i = 0; i < (maxCatCount + 31)/32; i++) | 74 | for (int i = 0; i < (maxCatCount + 31)/32; i++) |
| 76 | - fs << node->subset[i]; | ||
| 77 | - fs << "]"; | ||
| 78 | - } else { | ||
| 79 | - fs << "threshold" << node->threshold; | ||
| 80 | - } | 75 | + stream << node->subset[i]; |
| 76 | + else | ||
| 77 | + stream << node->threshold; | ||
| 81 | 78 | ||
| 82 | - fs << "featureIdx" << node->featureIdx; | 79 | + stream << node->featureIdx; |
| 83 | 80 | ||
| 84 | - fs << "left" << "{"; writeRecursive(fs, node->left, maxCatCount); fs << "}"; | ||
| 85 | - fs << "right" << "{"; writeRecursive(fs, node->right, maxCatCount); fs << "}"; | 81 | + storeRecursive(stream, node->left, maxCatCount); |
| 82 | + storeRecursive(stream, node->right, maxCatCount); | ||
| 86 | } | 83 | } |
| 87 | } | 84 | } |
| 88 | 85 | ||
| @@ -142,7 +139,7 @@ class BoostedForestClassifier : public Classifier | @@ -142,7 +139,7 @@ class BoostedForestClassifier : public Classifier | ||
| 142 | Node *node = classifiers[i]; | 139 | Node *node = classifiers[i]; |
| 143 | 140 | ||
| 144 | while (node->left) { | 141 | while (node->left) { |
| 145 | - if (representation->maxCatCount() > 1) { | 142 | + if (representation->maxCatCount() > 0) { |
| 146 | int c = (int)representation->evaluate(m, node->featureIdx); | 143 | int c = (int)representation->evaluate(m, node->featureIdx); |
| 147 | node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right; | 144 | node = (node->subset[c >> 5] & (1 << (c & 31))) ? node->left : node->right; |
| 148 | } else { | 145 | } else { |
| @@ -150,6 +147,7 @@ class BoostedForestClassifier : public Classifier | @@ -150,6 +147,7 @@ class BoostedForestClassifier : public Classifier | ||
| 150 | node = val <= node->threshold ? node->left : node->right; | 147 | node = val <= node->threshold ? node->left : node->right; |
| 151 | } | 148 | } |
| 152 | } | 149 | } |
| 150 | + | ||
| 153 | sum += node->value; | 151 | sum += node->value; |
| 154 | } | 152 | } |
| 155 | 153 | ||
| @@ -180,28 +178,23 @@ class BoostedForestClassifier : public Classifier | @@ -180,28 +178,23 @@ class BoostedForestClassifier : public Classifier | ||
| 180 | return representation->windowSize(dx, dy); | 178 | return representation->windowSize(dx, dy); |
| 181 | } | 179 | } |
| 182 | 180 | ||
| 183 | - void read(const FileNode &node) | 181 | + void load(QDataStream &stream) |
| 184 | { | 182 | { |
| 185 | - threshold = (float)node["stageThreshold"]; | ||
| 186 | - FileNode weaks_fn = node["weakClassifiers"]; | ||
| 187 | - for (FileNodeIterator weaks_it = weaks_fn.begin(); weaks_it != weaks_fn.end(); ++weaks_it) { | ||
| 188 | - Node *root = new Node; | ||
| 189 | - readRecursive(*weaks_it, root, representation->maxCatCount()); | ||
| 190 | - classifiers.append(root); | 183 | + stream >> threshold; |
| 184 | + int numClassifiers; stream >> numClassifiers; | ||
| 185 | + for (int i = 0; i < numClassifiers; i++) { | ||
| 186 | + Node *classifier = new Node; | ||
| 187 | + loadRecursive(stream, classifier, representation->maxCatCount()); | ||
| 188 | + classifiers.append(classifier); | ||
| 191 | } | 189 | } |
| 192 | } | 190 | } |
| 193 | 191 | ||
| 194 | - void write(FileStorage &fs) const | 192 | + void store(QDataStream &stream) const |
| 195 | { | 193 | { |
| 196 | - fs << "stageThreshold" << threshold; | ||
| 197 | - fs << "weakSize" << classifiers.size(); | ||
| 198 | - fs << "weakClassifiers" << "["; | ||
| 199 | - foreach (const Node *root, classifiers) { | ||
| 200 | - fs << "{"; | ||
| 201 | - writeRecursive(fs, root, representation->maxCatCount()); | ||
| 202 | - fs << "}"; | ||
| 203 | - } | ||
| 204 | - fs << "]"; | 194 | + stream << threshold; |
| 195 | + stream << classifiers.size(); | ||
| 196 | + foreach (const Node *classifier, classifiers) | ||
| 197 | + storeRecursive(stream, classifier, representation->maxCatCount()); | ||
| 205 | } | 198 | } |
| 206 | }; | 199 | }; |
| 207 | 200 |
openbr/plugins/classification/cascade.cpp
| @@ -185,25 +185,21 @@ class CascadeClassifier : public Classifier | @@ -185,25 +185,21 @@ class CascadeClassifier : public Classifier | ||
| 185 | return stages.first()->windowSize(dx, dy); | 185 | return stages.first()->windowSize(dx, dy); |
| 186 | } | 186 | } |
| 187 | 187 | ||
| 188 | - void read(const FileNode &node) | 188 | + void load(QDataStream &stream) |
| 189 | { | 189 | { |
| 190 | - FileNode stages_fn = node["stages"]; | ||
| 191 | - for (FileNodeIterator stages_it = stages_fn.begin(); stages_it != stages_fn.end(); ++stages_it) { | 190 | + int numStages; stream >> numStages; |
| 191 | + for (int i = 0; i < numStages; i++) { | ||
| 192 | Classifier *nextStage = Classifier::make(stageDescription, NULL); | 192 | Classifier *nextStage = Classifier::make(stageDescription, NULL); |
| 193 | - nextStage->read(*stages_it); | 193 | + nextStage->load(stream); |
| 194 | stages.append(nextStage); | 194 | stages.append(nextStage); |
| 195 | } | 195 | } |
| 196 | } | 196 | } |
| 197 | 197 | ||
| 198 | - void write(FileStorage &fs) const | 198 | + void store(QDataStream &stream) const |
| 199 | { | 199 | { |
| 200 | - fs << "stages" << "["; | ||
| 201 | - foreach (const Classifier *stage, stages) { | ||
| 202 | - fs << "{"; | ||
| 203 | - stage->write(fs); | ||
| 204 | - fs << "}"; | ||
| 205 | - } | ||
| 206 | - fs << "]"; | 200 | + stream << stages.size(); |
| 201 | + foreach (const Classifier *stage, stages) | ||
| 202 | + stage->store(stream); | ||
| 207 | } | 203 | } |
| 208 | 204 | ||
| 209 | private: | 205 | private: |
openbr/plugins/imgproc/slidingwindow.cpp
| @@ -151,36 +151,12 @@ class SlidingWindowTransform : public MetaTransform | @@ -151,36 +151,12 @@ class SlidingWindowTransform : public MetaTransform | ||
| 151 | 151 | ||
| 152 | void load(QDataStream &stream) | 152 | void load(QDataStream &stream) |
| 153 | { | 153 | { |
| 154 | - (void)stream; | ||
| 155 | - | ||
| 156 | - QString filename = model + "/cascade.xml"; | ||
| 157 | - FileStorage fs(filename.toStdString(), FileStorage::READ); | ||
| 158 | - if (!fs.isOpened()) | ||
| 159 | - return; | ||
| 160 | - | ||
| 161 | - classifier->read(fs.getFirstTopLevelNode()); | 154 | + classifier->load(stream); |
| 162 | } | 155 | } |
| 163 | 156 | ||
| 164 | void store(QDataStream &stream) const | 157 | void store(QDataStream &stream) const |
| 165 | { | 158 | { |
| 166 | - (void) stream; | ||
| 167 | - | ||
| 168 | - QString path = model; | ||
| 169 | - QtUtils::touchDir(QDir(path)); | ||
| 170 | - | ||
| 171 | - QString filename = path + "/cascade.xml"; | ||
| 172 | - FileStorage fs(filename.toStdString(), FileStorage::WRITE); | ||
| 173 | - | ||
| 174 | - if (!fs.isOpened()) { | ||
| 175 | - qWarning("Unable to open file: %s", qPrintable(filename)); | ||
| 176 | - return; | ||
| 177 | - } | ||
| 178 | - | ||
| 179 | - fs << FileStorage::getDefaultObjectName(filename.toStdString()) << "{"; | ||
| 180 | - | ||
| 181 | - classifier->write(fs); | ||
| 182 | - | ||
| 183 | - fs << "}"; | 159 | + classifier->store(stream); |
| 184 | } | 160 | } |
| 185 | }; | 161 | }; |
| 186 | 162 |