Commit edcbc8b8fbb3e66c4e46c654d101e49e212a700d
1 parent
e4a5aed6
Clean-up and steps towards incorporating representations in the backend
Showing
8 changed files
with
220 additions
and
294 deletions
openbr/core/boost.cpp
| ... | ... | @@ -171,29 +171,6 @@ void CascadeBoostParams::write( FileStorage &fs ) const |
| 171 | 171 | fs << CC_WEAK_COUNT << weak_count; |
| 172 | 172 | } |
| 173 | 173 | |
| 174 | -// ----------------------------- Compatibility ---------------------------------- | |
| 175 | - | |
| 176 | -BoostBRCompatibility::BoostBRCompatibility(Representation *_representation, int _numSamples) | |
| 177 | -{ | |
| 178 | - representation = _representation; | |
| 179 | - data.create(_numSamples, representation->windowSize().area(), CV_32SC1); | |
| 180 | - cls.create(_numSamples, 1, CV_32FC1); | |
| 181 | -} | |
| 182 | - | |
| 183 | -void BoostBRCompatibility::setImage(const Mat &img, uchar clsLabel, int idx) | |
| 184 | -{ | |
| 185 | - cls.ptr<float>(idx)[0] = clsLabel; | |
| 186 | - Mat preprocessed = representation->preprocess(img); | |
| 187 | - Mat copier(representation->windowSize(), CV_32FC1, data.ptr<int>(idx)); | |
| 188 | - preprocessed.copyTo(copier); | |
| 189 | -} | |
| 190 | - | |
| 191 | -float BoostBRCompatibility::operator ()(int featureIdx, int sampleIdx) const | |
| 192 | -{ | |
| 193 | - return representation->evaluate(data.row(sampleIdx).reshape(0, representation->windowSize().height), | |
| 194 | - QList<int>() << featureIdx).ptr<float>()[0]; | |
| 195 | -} | |
| 196 | - | |
| 197 | 174 | //---------------------------- CascadeBoostTrainData ----------------------------- |
| 198 | 175 | |
| 199 | 176 | CvDTreeNode* CascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx ) |
| ... | ... | @@ -361,18 +338,18 @@ CvDTreeNode* CascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx |
| 361 | 338 | return root; |
| 362 | 339 | } |
| 363 | 340 | |
| 364 | -CascadeBoostTrainData::CascadeBoostTrainData( const BoostBRCompatibility* _compat, | |
| 341 | +CascadeBoostTrainData::CascadeBoostTrainData( const FeatureEvaluator* _featureEvaluator, | |
| 365 | 342 | const CvDTreeParams& _params ) |
| 366 | 343 | { |
| 367 | 344 | is_classifier = true; |
| 368 | - var_all = var_count = _compat->numFeatures(); | |
| 345 | + var_all = var_count = (int)_featureEvaluator->getNumFeatures(); | |
| 369 | 346 | |
| 370 | - compat = _compat; | |
| 347 | + featureEvaluator = _featureEvaluator; | |
| 371 | 348 | shared = true; |
| 372 | 349 | set_params( _params ); |
| 373 | - max_c_count = MAX( 2, compat->maxCatCount() ); | |
| 350 | + max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() ); | |
| 374 | 351 | var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 ); |
| 375 | - if ( compat->maxCatCount() > 0 ) | |
| 352 | + if ( featureEvaluator->getMaxCatCount() > 0 ) | |
| 376 | 353 | { |
| 377 | 354 | numPrecalcIdx = 0; |
| 378 | 355 | cat_var_count = var_count; |
| ... | ... | @@ -402,15 +379,15 @@ CascadeBoostTrainData::CascadeBoostTrainData( const BoostBRCompatibility* _compa |
| 402 | 379 | split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage ); |
| 403 | 380 | } |
| 404 | 381 | |
| 405 | -CascadeBoostTrainData::CascadeBoostTrainData( const BoostBRCompatibility* _compat, | |
| 382 | +CascadeBoostTrainData::CascadeBoostTrainData( const FeatureEvaluator* _featureEvaluator, | |
| 406 | 383 | int _numSamples, |
| 407 | 384 | int _precalcValBufSize, int _precalcIdxBufSize, |
| 408 | 385 | const CvDTreeParams& _params ) |
| 409 | 386 | { |
| 410 | - setData( _compat, _numSamples, _precalcValBufSize, _precalcIdxBufSize, _params ); | |
| 387 | + setData( _featureEvaluator, _numSamples, _precalcValBufSize, _precalcIdxBufSize, _params ); | |
| 411 | 388 | } |
| 412 | 389 | |
| 413 | -void CascadeBoostTrainData::setData( const BoostBRCompatibility* _compat, | |
| 390 | +void CascadeBoostTrainData::setData( const FeatureEvaluator* _featureEvaluator, | |
| 414 | 391 | int _numSamples, |
| 415 | 392 | int _precalcValBufSize, int _precalcIdxBufSize, |
| 416 | 393 | const CvDTreeParams& _params ) |
| ... | ... | @@ -421,7 +398,6 @@ void CascadeBoostTrainData::setData( const BoostBRCompatibility* _compat, |
| 421 | 398 | uint64 effective_buf_size = 0; |
| 422 | 399 | int effective_buf_height = 0, effective_buf_width = 0; |
| 423 | 400 | |
| 424 | - | |
| 425 | 401 | clear(); |
| 426 | 402 | shared = true; |
| 427 | 403 | have_labels = true; |
| ... | ... | @@ -432,17 +408,18 @@ void CascadeBoostTrainData::setData( const BoostBRCompatibility* _compat, |
| 432 | 408 | |
| 433 | 409 | set_params( _params ); |
| 434 | 410 | |
| 435 | - compat = _compat; | |
| 411 | + CV_Assert( _featureEvaluator ); | |
| 412 | + featureEvaluator = _featureEvaluator; | |
| 436 | 413 | |
| 437 | - max_c_count = MAX( 2, compat->maxCatCount() ); | |
| 438 | - _resp = compat->getCls(); | |
| 414 | + max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() ); | |
| 415 | + _resp = featureEvaluator->getCls(); | |
| 439 | 416 | responses = &_resp; |
| 440 | 417 | // TODO: check responses: elements must be 0 or 1 |
| 441 | 418 | |
| 442 | 419 | if( _precalcValBufSize < 0 || _precalcIdxBufSize < 0) |
| 443 | 420 | CV_Error( CV_StsOutOfRange, "_numPrecalcVal and _numPrecalcIdx must be positive or 0" ); |
| 444 | 421 | |
| 445 | - var_count = var_all = compat->numFeatures(); | |
| 422 | + var_count = var_all = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize(); | |
| 446 | 423 | sample_count = _numSamples; |
| 447 | 424 | |
| 448 | 425 | is_buf_16u = false; |
| ... | ... | @@ -457,7 +434,7 @@ void CascadeBoostTrainData::setData( const BoostBRCompatibility* _compat, |
| 457 | 434 | |
| 458 | 435 | valCache.create( numPrecalcVal, sample_count, CV_32FC1 ); |
| 459 | 436 | var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 ); |
| 460 | - if ( compat->maxCatCount() > 0 ) | |
| 437 | + if ( featureEvaluator->getMaxCatCount() > 0 ) | |
| 461 | 438 | { |
| 462 | 439 | numPrecalcIdx = 0; |
| 463 | 440 | cat_var_count = var_count; |
| ... | ... | @@ -623,7 +600,7 @@ void CascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ord |
| 623 | 600 | { |
| 624 | 601 | int idx = (*sortedIndices)[i]; |
| 625 | 602 | idx = sampleIndices[idx]; |
| 626 | - ordValuesBuf[i] = (*compat)( vi, idx); | |
| 603 | + ordValuesBuf[i] = (*featureEvaluator)( vi, idx); | |
| 627 | 604 | } |
| 628 | 605 | } |
| 629 | 606 | } |
| ... | ... | @@ -645,7 +622,7 @@ void CascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ord |
| 645 | 622 | for( int i = 0; i < nodeSampleCount; i++ ) |
| 646 | 623 | { |
| 647 | 624 | sortedIndicesBuf[i] = i; |
| 648 | - sampleValues[i] = (*compat)( vi, sampleIndices[i]); | |
| 625 | + sampleValues[i] = (*featureEvaluator)( vi, sampleIndices[i]); | |
| 649 | 626 | } |
| 650 | 627 | } |
| 651 | 628 | icvSortIntAux( sortedIndicesBuf, nodeSampleCount, &sampleValues[0] ); |
| ... | ... | @@ -673,7 +650,7 @@ const int* CascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* |
| 673 | 650 | if( vi >= numPrecalcVal && vi < var_count ) |
| 674 | 651 | { |
| 675 | 652 | for( int i = 0; i < nodeSampleCount; i++ ) |
| 676 | - catValuesBuf[i] = (int)(*compat)( vi, sampleIndices[i] ); | |
| 653 | + catValuesBuf[i] = (int)(*featureEvaluator)( vi, sampleIndices[i] ); | |
| 677 | 654 | } |
| 678 | 655 | else |
| 679 | 656 | { |
| ... | ... | @@ -688,15 +665,14 @@ float CascadeBoostTrainData::getVarValue( int vi, int si ) |
| 688 | 665 | { |
| 689 | 666 | if ( vi < numPrecalcVal && !valCache.empty() ) |
| 690 | 667 | return valCache.at<float>( vi, si ); |
| 691 | - return (*compat)( vi, si ); | |
| 668 | + return (*featureEvaluator)( vi, si ); | |
| 692 | 669 | } |
| 693 | 670 | |
| 694 | - | |
| 695 | 671 | struct FeatureIdxOnlyPrecalc : ParallelLoopBody |
| 696 | 672 | { |
| 697 | - FeatureIdxOnlyPrecalc( const BoostBRCompatibility* _compat, CvMat* _buf, int _sample_count, bool _is_buf_16u ) | |
| 673 | + FeatureIdxOnlyPrecalc( const FeatureEvaluator* _featureEvaluator, CvMat* _buf, int _sample_count, bool _is_buf_16u ) | |
| 698 | 674 | { |
| 699 | - compat = _compat; | |
| 675 | + featureEvaluator = _featureEvaluator; | |
| 700 | 676 | sample_count = _sample_count; |
| 701 | 677 | udst = (unsigned short*)_buf->data.s; |
| 702 | 678 | idst = _buf->data.i; |
| ... | ... | @@ -710,7 +686,7 @@ struct FeatureIdxOnlyPrecalc : ParallelLoopBody |
| 710 | 686 | { |
| 711 | 687 | for( int si = 0; si < sample_count; si++ ) |
| 712 | 688 | { |
| 713 | - valCachePtr[si] = (*compat)( fi, si ); | |
| 689 | + valCachePtr[si] = (*featureEvaluator)( fi, si ); | |
| 714 | 690 | if ( is_buf_16u ) |
| 715 | 691 | *(udst + fi*sample_count + si) = (unsigned short)si; |
| 716 | 692 | else |
| ... | ... | @@ -722,7 +698,7 @@ struct FeatureIdxOnlyPrecalc : ParallelLoopBody |
| 722 | 698 | icvSortIntAux( idst + fi*sample_count, sample_count, valCachePtr ); |
| 723 | 699 | } |
| 724 | 700 | } |
| 725 | - const BoostBRCompatibility* compat; | |
| 701 | + const FeatureEvaluator* featureEvaluator; | |
| 726 | 702 | int sample_count; |
| 727 | 703 | int* idst; |
| 728 | 704 | unsigned short* udst; |
| ... | ... | @@ -731,9 +707,9 @@ struct FeatureIdxOnlyPrecalc : ParallelLoopBody |
| 731 | 707 | |
| 732 | 708 | struct FeatureValAndIdxPrecalc : ParallelLoopBody |
| 733 | 709 | { |
| 734 | - FeatureValAndIdxPrecalc( const BoostBRCompatibility* _compat, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u ) | |
| 710 | + FeatureValAndIdxPrecalc( const FeatureEvaluator* _featureEvaluator, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u ) | |
| 735 | 711 | { |
| 736 | - compat = _compat; | |
| 712 | + featureEvaluator = _featureEvaluator; | |
| 737 | 713 | valCache = _valCache; |
| 738 | 714 | sample_count = _sample_count; |
| 739 | 715 | udst = (unsigned short*)_buf->data.s; |
| ... | ... | @@ -746,7 +722,7 @@ struct FeatureValAndIdxPrecalc : ParallelLoopBody |
| 746 | 722 | { |
| 747 | 723 | for( int si = 0; si < sample_count; si++ ) |
| 748 | 724 | { |
| 749 | - valCache->at<float>(fi,si) = (*compat)( fi, si ); | |
| 725 | + valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si ); | |
| 750 | 726 | if ( is_buf_16u ) |
| 751 | 727 | *(udst + fi*sample_count + si) = (unsigned short)si; |
| 752 | 728 | else |
| ... | ... | @@ -758,7 +734,7 @@ struct FeatureValAndIdxPrecalc : ParallelLoopBody |
| 758 | 734 | icvSortIntAux( idst + fi*sample_count, sample_count, valCache->ptr<float>(fi) ); |
| 759 | 735 | } |
| 760 | 736 | } |
| 761 | - const BoostBRCompatibility* compat; | |
| 737 | + const FeatureEvaluator* featureEvaluator; | |
| 762 | 738 | Mat* valCache; |
| 763 | 739 | int sample_count; |
| 764 | 740 | int* idst; |
| ... | ... | @@ -768,9 +744,9 @@ struct FeatureValAndIdxPrecalc : ParallelLoopBody |
| 768 | 744 | |
| 769 | 745 | struct FeatureValOnlyPrecalc : ParallelLoopBody |
| 770 | 746 | { |
| 771 | - FeatureValOnlyPrecalc( const BoostBRCompatibility* _compat, Mat* _valCache, int _sample_count ) | |
| 747 | + FeatureValOnlyPrecalc( const FeatureEvaluator* _featureEvaluator, Mat* _valCache, int _sample_count ) | |
| 772 | 748 | { |
| 773 | - compat = _compat; | |
| 749 | + featureEvaluator = _featureEvaluator; | |
| 774 | 750 | valCache = _valCache; |
| 775 | 751 | sample_count = _sample_count; |
| 776 | 752 | } |
| ... | ... | @@ -778,9 +754,9 @@ struct FeatureValOnlyPrecalc : ParallelLoopBody |
| 778 | 754 | { |
| 779 | 755 | for ( int fi = range.start; fi < range.end; fi++) |
| 780 | 756 | for( int si = 0; si < sample_count; si++ ) |
| 781 | - valCache->at<float>(fi,si) = (*compat)( fi, si ); | |
| 757 | + valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si ); | |
| 782 | 758 | } |
| 783 | - const BoostBRCompatibility* compat; | |
| 759 | + const FeatureEvaluator* featureEvaluator; | |
| 784 | 760 | Mat* valCache; |
| 785 | 761 | int sample_count; |
| 786 | 762 | }; |
| ... | ... | @@ -791,11 +767,11 @@ void CascadeBoostTrainData::precalculate() |
| 791 | 767 | |
| 792 | 768 | double proctime = -TIME( 0 ); |
| 793 | 769 | parallel_for_( Range(numPrecalcVal, numPrecalcIdx), |
| 794 | - FeatureIdxOnlyPrecalc(compat, buf, sample_count, is_buf_16u!=0) ); | |
| 770 | + FeatureIdxOnlyPrecalc(featureEvaluator, buf, sample_count, is_buf_16u!=0) ); | |
| 795 | 771 | parallel_for_( Range(0, minNum), |
| 796 | - FeatureValAndIdxPrecalc(compat, buf, &valCache, sample_count, is_buf_16u!=0) ); | |
| 772 | + FeatureValAndIdxPrecalc(featureEvaluator, buf, &valCache, sample_count, is_buf_16u!=0) ); | |
| 797 | 773 | parallel_for_( Range(minNum, numPrecalcVal), |
| 798 | - FeatureValOnlyPrecalc(compat, &valCache, sample_count) ); | |
| 774 | + FeatureValOnlyPrecalc(featureEvaluator, &valCache, sample_count) ); | |
| 799 | 775 | cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl; |
| 800 | 776 | } |
| 801 | 777 | |
| ... | ... | @@ -807,7 +783,7 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const |
| 807 | 783 | if( !node ) |
| 808 | 784 | CV_Error( CV_StsError, "The tree has not been trained yet" ); |
| 809 | 785 | |
| 810 | - if ( ((CascadeBoostTrainData*)data)->compat->maxCatCount() == 0 ) // ordered | |
| 786 | + if ( ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount() == 0 ) // ordered | |
| 811 | 787 | { |
| 812 | 788 | while( node->left ) |
| 813 | 789 | { |
| ... | ... | @@ -830,7 +806,7 @@ CvDTreeNode* CascadeBoostTree::predict( int sampleIdx ) const |
| 830 | 806 | |
| 831 | 807 | void CascadeBoostTree::write( FileStorage &fs, const Mat& featureMap ) |
| 832 | 808 | { |
| 833 | - int maxCatCount = ((CascadeBoostTrainData*)data)->compat->maxCatCount(); | |
| 809 | + int maxCatCount = ((CascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount(); | |
| 834 | 810 | int subsetN = (maxCatCount + 31)/32; |
| 835 | 811 | queue<CvDTreeNode*> internalNodesQueue; |
| 836 | 812 | int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth); |
| ... | ... | @@ -1123,7 +1099,7 @@ void CascadeBoostTree::markFeaturesInMap( Mat& featureMap ) |
| 1123 | 1099 | |
| 1124 | 1100 | //----------------------------------- CascadeBoost -------------------------------------- |
| 1125 | 1101 | |
| 1126 | -bool CascadeBoost::train( const BoostBRCompatibility* _compat, | |
| 1102 | +bool CascadeBoost::train( const FeatureEvaluator* _featureEvaluator, | |
| 1127 | 1103 | int _numSamples, |
| 1128 | 1104 | int _precalcValBufSize, int _precalcIdxBufSize, |
| 1129 | 1105 | const CascadeBoostParams& _params ) |
| ... | ... | @@ -1132,7 +1108,7 @@ bool CascadeBoost::train( const BoostBRCompatibility* _compat, |
| 1132 | 1108 | CV_Assert( !data ); |
| 1133 | 1109 | clear(); |
| 1134 | 1110 | |
| 1135 | - data = new CascadeBoostTrainData( _compat, _numSamples, | |
| 1111 | + data = new CascadeBoostTrainData( _featureEvaluator, _numSamples, | |
| 1136 | 1112 | _precalcValBufSize, _precalcIdxBufSize, _params ); |
| 1137 | 1113 | |
| 1138 | 1114 | CvMemStorage *storage = cvCreateMemStorage(); |
| ... | ... | @@ -1154,7 +1130,6 @@ bool CascadeBoost::train( const BoostBRCompatibility* _compat, |
| 1154 | 1130 | CascadeBoostTree* tree = new CascadeBoostTree; |
| 1155 | 1131 | if( !tree->train( data, subsample_mask, this ) ) |
| 1156 | 1132 | { |
| 1157 | - qDebug("Couldn't train tree"); | |
| 1158 | 1133 | delete tree; |
| 1159 | 1134 | break; |
| 1160 | 1135 | } |
| ... | ... | @@ -1454,7 +1429,7 @@ bool CascadeBoost::isErrDesired() |
| 1454 | 1429 | vector<float> eval(sCount); |
| 1455 | 1430 | |
| 1456 | 1431 | for( int i = 0; i < sCount; i++ ) |
| 1457 | - if( ((CascadeBoostTrainData*)data)->compat->getCls( i ) == 1.0F ) | |
| 1432 | + if( ((CascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 1.0F ) | |
| 1458 | 1433 | eval[numPos++] = predict( i, true ); |
| 1459 | 1434 | icvSortFlt( &eval[0], numPos, 0 ); |
| 1460 | 1435 | int thresholdIdx = (int)((1.0F - minHitRate) * numPos); |
| ... | ... | @@ -1467,7 +1442,7 @@ bool CascadeBoost::isErrDesired() |
| 1467 | 1442 | |
| 1468 | 1443 | for( int i = 0; i < sCount; i++ ) |
| 1469 | 1444 | { |
| 1470 | - if( ((CascadeBoostTrainData*)data)->compat->getCls( i ) == 0.0F ) | |
| 1445 | + if( ((CascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 0.0F ) | |
| 1471 | 1446 | { |
| 1472 | 1447 | numNeg++; |
| 1473 | 1448 | if( predict( i ) ) | ... | ... |
openbr/core/boost.h
| ... | ... | @@ -13,52 +13,36 @@ struct CascadeBoostParams : CvBoostParams |
| 13 | 13 | float maxFalseAlarm; |
| 14 | 14 | |
| 15 | 15 | CascadeBoostParams(); |
| 16 | - CascadeBoostParams( int _boostType, float _minHitRate, float _maxFalseAlarm, | |
| 17 | - double _weightTrimRate, int _maxDepth, int _maxWeakCount ); | |
| 16 | + CascadeBoostParams(int _boostType, float _minHitRate, float _maxFalseAlarm, | |
| 17 | + double _weightTrimRate, int _maxDepth, int _maxWeakCount); | |
| 18 | 18 | virtual ~CascadeBoostParams() {} |
| 19 | 19 | void write( cv::FileStorage &fs ) const; |
| 20 | 20 | }; |
| 21 | 21 | |
| 22 | -struct BoostBRCompatibility | |
| 23 | -{ | |
| 24 | - BoostBRCompatibility(Representation *_representation, int _numSamples); | |
| 25 | - void setImage(const cv::Mat &img, uchar clsLabel, int idx); | |
| 26 | - float operator()(int featureIdx, int sampleIdx) const; | |
| 27 | - const cv::Mat &getCls() const { return cls; } | |
| 28 | - float getCls(int idx) const { return cls.at<float>(idx, 0); } | |
| 29 | - | |
| 30 | - int numFeatures() const { return representation->numFeatures(); } | |
| 31 | - int maxCatCount() const { return 256; } | |
| 32 | - | |
| 33 | - Representation *representation; | |
| 34 | - cv::Mat data, cls; | |
| 35 | -}; | |
| 36 | - | |
| 37 | 22 | struct CascadeBoostTrainData : CvDTreeTrainData |
| 38 | 23 | { |
| 39 | - CascadeBoostTrainData( const BoostBRCompatibility* _compat, | |
| 40 | - const CvDTreeParams& _params ); | |
| 41 | - CascadeBoostTrainData( const BoostBRCompatibility* _compat, | |
| 42 | - int _numSamples, int _precalcValBufSize, int _precalcIdxBufSize, | |
| 43 | - const CvDTreeParams& _params = CvDTreeParams() ); | |
| 44 | - virtual void setData( const BoostBRCompatibility* _compat, | |
| 24 | + CascadeBoostTrainData(const FeatureEvaluator* _featureEvaluator, const CvDTreeParams& _params); | |
| 25 | + CascadeBoostTrainData(const FeatureEvaluator* _featureEvaluator, | |
| 45 | 26 | int _numSamples, int _precalcValBufSize, int _precalcIdxBufSize, |
| 46 | - const CvDTreeParams& _params=CvDTreeParams() ); | |
| 27 | + const CvDTreeParams& _params = CvDTreeParams()); | |
| 28 | + virtual void setData(const FeatureEvaluator* _featureEvaluator, | |
| 29 | + int _numSamples, int _precalcValBufSize, int _precalcIdxBufSize, | |
| 30 | + const CvDTreeParams& _params=CvDTreeParams()); | |
| 47 | 31 | void precalculate(); |
| 48 | 32 | |
| 49 | - virtual CvDTreeNode* subsample_data( const CvMat* _subsample_idx ); | |
| 33 | + virtual CvDTreeNode* subsample_data(const CvMat* _subsample_idx); | |
| 50 | 34 | |
| 51 | - virtual const int* get_class_labels( CvDTreeNode* n, int* labelsBuf ); | |
| 52 | - virtual const int* get_cv_labels( CvDTreeNode* n, int* labelsBuf); | |
| 53 | - virtual const int* get_sample_indices( CvDTreeNode* n, int* indicesBuf ); | |
| 35 | + virtual const int* get_class_labels(CvDTreeNode* n, int* labelsBuf); | |
| 36 | + virtual const int* get_cv_labels(CvDTreeNode* n, int* labelsBuf); | |
| 37 | + virtual const int* get_sample_indices(CvDTreeNode* n, int* indicesBuf); | |
| 54 | 38 | |
| 55 | - virtual void get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* sortedIndicesBuf, | |
| 56 | - const float** ordValues, const int** sortedIndices, int* sampleIndicesBuf ); | |
| 57 | - virtual const int* get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf ); | |
| 58 | - virtual float getVarValue( int vi, int si ); | |
| 39 | + virtual void get_ord_var_data(CvDTreeNode* n, int vi, float* ordValuesBuf, int* sortedIndicesBuf, | |
| 40 | + const float** ordValues, const int** sortedIndices, int* sampleIndicesBuf); | |
| 41 | + virtual const int* get_cat_var_data(CvDTreeNode* n, int vi, int* catValuesBuf); | |
| 42 | + virtual float getVarValue(int vi, int si); | |
| 59 | 43 | virtual void free_train_data(); |
| 60 | 44 | |
| 61 | - const BoostBRCompatibility* compat; | |
| 45 | + const FeatureEvaluator* featureEvaluator; | |
| 62 | 46 | cv::Mat valCache; // precalculated feature values (CV_32FC1) |
| 63 | 47 | CvMat _resp; // for casting |
| 64 | 48 | int numPrecalcVal, numPrecalcIdx; |
| ... | ... | @@ -67,27 +51,29 @@ struct CascadeBoostTrainData : CvDTreeTrainData |
| 67 | 51 | class CascadeBoostTree : public CvBoostTree |
| 68 | 52 | { |
| 69 | 53 | public: |
| 70 | - virtual CvDTreeNode* predict( int sampleIdx ) const; | |
| 71 | - void write( cv::FileStorage &fs, const cv::Mat& featureMap ); | |
| 72 | - void markFeaturesInMap( cv::Mat& featureMap ); | |
| 54 | + virtual CvDTreeNode* predict(int sampleIdx) const; | |
| 55 | + void write(cv::FileStorage &fs, const cv::Mat& featureMap); | |
| 56 | + void markFeaturesInMap(cv::Mat& featureMap); | |
| 57 | + | |
| 73 | 58 | protected: |
| 74 | - virtual void split_node_data( CvDTreeNode* n ); | |
| 59 | + virtual void split_node_data(CvDTreeNode* n); | |
| 75 | 60 | }; |
| 76 | 61 | |
| 77 | 62 | class CascadeBoost : public CvBoost |
| 78 | 63 | { |
| 79 | 64 | public: |
| 80 | - virtual bool train( const BoostBRCompatibility* _compat, | |
| 81 | - int _numSamples, int _precalcValBufSize, int _precalcIdxBufSize, | |
| 82 | - const CascadeBoostParams& _params=CascadeBoostParams() ); | |
| 65 | + virtual bool train(const FeatureEvaluator *_featureEvaluator, | |
| 66 | + int _numSamples, int _precalcValBufSize, int _precalcIdxBufSize, | |
| 67 | + const CascadeBoostParams &_params=CascadeBoostParams()); | |
| 83 | 68 | virtual float predict( int sampleIdx, bool returnSum = false ) const; |
| 84 | 69 | |
| 85 | 70 | float getThreshold() const { return threshold; } |
| 86 | - void write( cv::FileStorage &fs, const cv::Mat& featureMap ) const; | |
| 87 | - void markUsedFeaturesInMap( cv::Mat& featureMap ); | |
| 71 | + void write(cv::FileStorage &fs, const cv::Mat& featureMap) const; | |
| 72 | + void markUsedFeaturesInMap(cv::Mat& featureMap); | |
| 73 | + | |
| 88 | 74 | protected: |
| 89 | - virtual bool set_params( const CvBoostParams& _params ); | |
| 90 | - virtual void update_weights( CvBoostTree* tree ); | |
| 75 | + virtual bool set_params(const CvBoostParams& _params); | |
| 76 | + virtual void update_weights(CvBoostTree* tree); | |
| 91 | 77 | virtual bool isErrDesired(); |
| 92 | 78 | |
| 93 | 79 | float threshold; | ... | ... |
openbr/core/cascade.cpp
| ... | ... | @@ -3,17 +3,17 @@ |
| 3 | 3 | #include <iostream> |
| 4 | 4 | #include <fstream> |
| 5 | 5 | |
| 6 | +using namespace std; | |
| 6 | 7 | using namespace br; |
| 7 | 8 | using namespace cv; |
| 8 | 9 | |
| 9 | -bool CascadeImageReader::create( const QList<Mat> &_posImages, const QList<Mat> &_negImages, Size _winSize ) | |
| 10 | +bool CascadeImageReader::create(const QList<Mat> &_posImages, const QList<Mat> &_negImages, Size _winSize) | |
| 10 | 11 | { |
| 11 | 12 | posImages = _posImages; |
| 12 | 13 | negImages = _negImages; |
| 13 | 14 | winSize = _winSize; |
| 14 | 15 | |
| 15 | - posIdx = 0; negIdx = 0; | |
| 16 | - round = 0; | |
| 16 | + posIdx = negIdx = 0; | |
| 17 | 17 | |
| 18 | 18 | src.create( 0, 0 , CV_8UC1 ); |
| 19 | 19 | img.create( 0, 0, CV_8UC1 ); |
| ... | ... | @@ -21,22 +21,32 @@ bool CascadeImageReader::create( const QList<Mat> &_posImages, const QList<Mat> |
| 21 | 21 | scale = 1.0F; |
| 22 | 22 | scaleFactor = 1.4142135623730950488016887242097F; |
| 23 | 23 | stepFactor = 0.5F; |
| 24 | + round = 0; | |
| 24 | 25 | |
| 25 | 26 | return true; |
| 26 | 27 | } |
| 27 | 28 | |
| 28 | 29 | bool CascadeImageReader::nextNeg() |
| 29 | 30 | { |
| 30 | - src = negImages[negIdx++]; | |
| 31 | - | |
| 32 | - round += negIdx / negImages.size(); | |
| 33 | - round = round % (winSize.width * winSize.height); | |
| 34 | - negIdx %= negImages.size(); | |
| 35 | - | |
| 36 | - offset.x = std::min( (int)round % winSize.width, src.cols - winSize.width ); | |
| 37 | - offset.y = std::min( (int)round / winSize.width, src.rows - winSize.height ); | |
| 31 | + Point _offset = Point(0,0); | |
| 32 | + size_t count = negImages.size(); | |
| 33 | + for (size_t i = 0; i < count; i++) { | |
| 34 | + src = negImages[negIdx++]; | |
| 35 | + if( src.empty() ) | |
| 36 | + continue; | |
| 37 | + round += negIdx / count; | |
| 38 | + round = round % (winSize.width * winSize.height); | |
| 39 | + negIdx %= count; | |
| 40 | + | |
| 41 | + _offset.x = std::min( (int)round % winSize.width, src.cols - winSize.width ); | |
| 42 | + _offset.y = std::min( (int)round / winSize.width, src.rows - winSize.height ); | |
| 43 | + if( !src.empty() && src.type() == CV_8UC1 && _offset.x >= 0 && _offset.y >= 0 ) | |
| 44 | + break; | |
| 45 | + } | |
| 38 | 46 | |
| 39 | - point = offset; | |
| 47 | + if( src.empty() ) | |
| 48 | + return false; // no appropriate image | |
| 49 | + point = offset = _offset; | |
| 40 | 50 | scale = max( ((float)winSize.width + point.x) / ((float)src.cols), |
| 41 | 51 | ((float)winSize.height + point.y) / ((float)src.rows) ); |
| 42 | 52 | |
| ... | ... | @@ -56,7 +66,8 @@ bool CascadeImageReader::getNeg( Mat& _img ) |
| 56 | 66 | if ( !nextNeg() ) |
| 57 | 67 | return false; |
| 58 | 68 | |
| 59 | - Mat mat( winSize.height, winSize.width, CV_8UC1, (void*)(img.data + point.y * img.step + point.x * img.elemSize()), img.step ); | |
| 69 | + Mat mat( winSize.height, winSize.width, CV_8UC1, | |
| 70 | + (void*)(img.data + point.y * img.step + point.x * img.elemSize()), img.step ); | |
| 60 | 71 | mat.copyTo(_img); |
| 61 | 72 | |
| 62 | 73 | if( (int)( point.x + (1.0F + stepFactor ) * winSize.width ) < img.cols ) |
| ... | ... | @@ -82,11 +93,10 @@ bool CascadeImageReader::getNeg( Mat& _img ) |
| 82 | 93 | return true; |
| 83 | 94 | } |
| 84 | 95 | |
| 96 | + | |
| 85 | 97 | bool CascadeImageReader::getPos(Mat &_img) |
| 86 | 98 | { |
| 87 | - if (posIdx > (int)posImages.size()) | |
| 88 | - CV_Error( CV_StsBadArg, "Can not get new positive sample. Not enough positive samples.\n"); | |
| 89 | - _img = posImages[posIdx++]; | |
| 99 | + posImages[posIdx++].copyTo(_img); | |
| 90 | 100 | return true; |
| 91 | 101 | } |
| 92 | 102 | |
| ... | ... | @@ -95,15 +105,16 @@ bool CascadeImageReader::getPos(Mat &_img) |
| 95 | 105 | bool BrCascadeClassifier::train(const string _cascadeDirName, |
| 96 | 106 | const QList<Mat> &_posImages, |
| 97 | 107 | const QList<Mat> &_negImages, |
| 108 | + int _numPos, int _numNeg, | |
| 98 | 109 | int _precalcValBufSize, int _precalcIdxBufSize, |
| 99 | - int _numPos, int _numNeg, int _numStages, | |
| 100 | - Representation *_representation, | |
| 110 | + int _numStages, | |
| 111 | + Size _winSize, | |
| 101 | 112 | const CascadeBoostParams& _stageParams) |
| 102 | 113 | { |
| 103 | 114 | // Start recording clock ticks for training time output |
| 104 | 115 | const clock_t begin_time = clock(); |
| 105 | 116 | |
| 106 | - if( _cascadeDirName.empty() ) | |
| 117 | + if (_cascadeDirName.empty()) | |
| 107 | 118 | CV_Error( CV_StsBadArg, "_cascadeDirName is NULL" ); |
| 108 | 119 | |
| 109 | 120 | string dirName; |
| ... | ... | @@ -112,14 +123,17 @@ bool BrCascadeClassifier::train(const string _cascadeDirName, |
| 112 | 123 | else |
| 113 | 124 | dirName = _cascadeDirName + '/'; |
| 114 | 125 | |
| 115 | - compat = new BoostBRCompatibility(_representation, _numPos + _numNeg); | |
| 126 | + winSize = _winSize; | |
| 127 | + | |
| 116 | 128 | numPos = _numPos; |
| 117 | 129 | numNeg = _numNeg; |
| 118 | 130 | numStages = _numStages; |
| 119 | - imgReader.create(_posImages, _negImages, _representation->windowSize()); | |
| 131 | + imgReader.create(_posImages, _negImages, winSize); | |
| 120 | 132 | |
| 121 | 133 | stageParams = new CascadeBoostParams; |
| 122 | 134 | *stageParams = _stageParams; |
| 135 | + featureEvaluator = new FeatureEvaluator; | |
| 136 | + featureEvaluator->init(numPos + numNeg, winSize); | |
| 123 | 137 | stageClassifiers.reserve( numStages ); |
| 124 | 138 | |
| 125 | 139 | double requiredLeafFARate = pow( (double) stageParams->maxFalseAlarm, (double) numStages ) / |
| ... | ... | @@ -127,31 +141,29 @@ bool BrCascadeClassifier::train(const string _cascadeDirName, |
| 127 | 141 | double tempLeafFARate; |
| 128 | 142 | |
| 129 | 143 | for (int i = 0; i < numStages; i++) { |
| 130 | - qDebug() << endl << "===== TRAINING " << i << "-stage ====="; | |
| 131 | - qDebug() << "<BEGIN"; | |
| 132 | - if ( !updateTrainingSet( tempLeafFARate ) ) | |
| 133 | - { | |
| 134 | - qDebug() << "Train dataset for temp stage can not be filled. " | |
| 135 | - "Branch training terminated."; | |
| 144 | + cout << endl << "===== TRAINING " << i << "-stage =====" << endl; | |
| 145 | + cout << "<BEGIN" << endl; | |
| 146 | + if (!updateTrainingSet(tempLeafFARate)) { | |
| 147 | + cout << "Train dataset for temp stage can not be filled. " | |
| 148 | + "Branch training terminated." << endl; | |
| 136 | 149 | break; |
| 137 | 150 | } |
| 138 | - if( tempLeafFARate <= requiredLeafFARate ) | |
| 139 | - { | |
| 140 | - qDebug() << "Required leaf false alarm rate achieved. " | |
| 141 | - "Branch training terminated."; | |
| 151 | + if (tempLeafFARate <= requiredLeafFARate) { | |
| 152 | + cout << "Required leaf false alarm rate achieved. " | |
| 153 | + "Branch training terminated." << endl; | |
| 142 | 154 | break; |
| 143 | 155 | } |
| 144 | 156 | |
| 145 | 157 | CascadeBoost* tempStage = new CascadeBoost; |
| 146 | - bool isStageTrained = tempStage->train( (BoostBRCompatibility*)compat, | |
| 158 | + bool isStageTrained = tempStage->train( (FeatureEvaluator*)featureEvaluator, | |
| 147 | 159 | curNumSamples, _precalcValBufSize, _precalcIdxBufSize, |
| 148 | 160 | *((CascadeBoostParams*)stageParams) ); |
| 149 | - qDebug() << "END>"; | |
| 161 | + cout << "END>" << endl; | |
| 150 | 162 | |
| 151 | - if(!isStageTrained) | |
| 163 | + if (!isStageTrained) | |
| 152 | 164 | break; |
| 153 | 165 | |
| 154 | - stageClassifiers.push_back( tempStage ); | |
| 166 | + stageClassifiers.push_back(tempStage); | |
| 155 | 167 | |
| 156 | 168 | // Output training time up till now |
| 157 | 169 | float seconds = float( clock () - begin_time ) / CLOCKS_PER_SEC; |
| ... | ... | @@ -159,12 +171,11 @@ bool BrCascadeClassifier::train(const string _cascadeDirName, |
| 159 | 171 | int hours = (int(seconds) / 60 / 60) % 24; |
| 160 | 172 | int minutes = (int(seconds) / 60) % 60; |
| 161 | 173 | int seconds_left = int(seconds) % 60; |
| 162 | - qDebug() << "Training until now has taken " << days << " days " << hours << " hours " << minutes << " minutes " << seconds_left <<" seconds."; | |
| 174 | + cout << "Training until now has taken " << days << " days " << hours << " hours " << minutes << " minutes " << seconds_left <<" seconds." << endl; | |
| 163 | 175 | } |
| 164 | 176 | |
| 165 | - if(stageClassifiers.size() == 0) | |
| 166 | - { | |
| 167 | - qDebug() << "Cascade classifier can't be trained. Check the used training parameters."; | |
| 177 | + if (stageClassifiers.size() == 0) { | |
| 178 | + cout << "Cascade classifier can't be trained. Check the used training parameters." << endl; | |
| 168 | 179 | return false; |
| 169 | 180 | } |
| 170 | 181 | |
| ... | ... | @@ -193,7 +204,7 @@ bool BrCascadeClassifier::updateTrainingSet( double& acceptanceRatio) |
| 193 | 204 | int posCount = fillPassedSamples( 0, numPos, true, posConsumed ); |
| 194 | 205 | if( !posCount ) |
| 195 | 206 | return false; |
| 196 | - qDebug() << "POS count : consumed " << posCount << " : " << (int)posConsumed; | |
| 207 | + cout << "POS count : consumed " << posCount << " : " << (int)posConsumed << endl; | |
| 197 | 208 | |
| 198 | 209 | int proNumNeg = cvRound( ( ((double)numNeg) * ((double)posCount) ) / numPos ); // apply only a fraction of negative samples. double is required since overflow is possible |
| 199 | 210 | int negCount = fillPassedSamples( posCount, proNumNeg, false, negConsumed ); |
| ... | ... | @@ -202,14 +213,14 @@ bool BrCascadeClassifier::updateTrainingSet( double& acceptanceRatio) |
| 202 | 213 | |
| 203 | 214 | curNumSamples = posCount + negCount; |
| 204 | 215 | acceptanceRatio = negConsumed == 0 ? 0 : ( (double)negCount/(double)(int64)negConsumed ); |
| 205 | - qDebug() << "NEG count : acceptanceRatio " << negCount << " : " << acceptanceRatio; | |
| 216 | + cout << "NEG count : acceptanceRatio " << negCount << " : " << acceptanceRatio << endl; | |
| 206 | 217 | return true; |
| 207 | 218 | } |
| 208 | 219 | |
| 209 | 220 | int BrCascadeClassifier::fillPassedSamples( int first, int count, bool isPositive, int64& consumed ) |
| 210 | 221 | { |
| 211 | 222 | int getcount = 0; |
| 212 | - Mat img(compat->representation->windowSize(), CV_8UC1); | |
| 223 | + Mat img(winSize, CV_8UC1); | |
| 213 | 224 | for( int i = first; i < first + count; i++ ) |
| 214 | 225 | { |
| 215 | 226 | for( ; ; ) |
| ... | ... | @@ -220,7 +231,7 @@ int BrCascadeClassifier::fillPassedSamples( int first, int count, bool isPositiv |
| 220 | 231 | return getcount; |
| 221 | 232 | consumed++; |
| 222 | 233 | |
| 223 | - compat->setImage( img, isPositive ? 1 : 0, i ); | |
| 234 | + featureEvaluator->setImage( img, isPositive ? 1 : 0, i ); | |
| 224 | 235 | if( predict( i ) == 1.0F ) |
| 225 | 236 | { |
| 226 | 237 | getcount++; |
| ... | ... | @@ -238,13 +249,18 @@ void BrCascadeClassifier::writeParams( FileStorage &fs ) const |
| 238 | 249 | fs << CC_FEATURE_TYPE << CC_LBP; |
| 239 | 250 | fs << CC_HEIGHT << winSize.height; |
| 240 | 251 | fs << CC_WIDTH << winSize.width; |
| 252 | + | |
| 241 | 253 | fs << CC_STAGE_PARAMS << "{"; stageParams->write( fs ); fs << "}"; |
| 242 | - fs << CC_FEATURE_PARAMS << "{"; fs << CC_MAX_CAT_COUNT << 256; fs << CC_FEATURE_SIZE << 1; fs << "}"; | |
| 254 | + | |
| 255 | + fs << CC_FEATURE_PARAMS << "{"; | |
| 256 | + fs << CC_MAX_CAT_COUNT << featureEvaluator->getMaxCatCount(); | |
| 257 | + fs << CC_FEATURE_SIZE << featureEvaluator->getFeatureSize(); | |
| 258 | + fs << "}"; | |
| 243 | 259 | } |
| 244 | 260 | |
| 245 | 261 | void BrCascadeClassifier::writeFeatures( FileStorage &fs, const Mat& featureMap ) const |
| 246 | 262 | { |
| 247 | - compat->representation->write(fs, featureMap); | |
| 263 | + ((FeatureEvaluator*)((Ptr<FeatureEvaluator>)featureEvaluator))->writeFeatures( fs, featureMap ); | |
| 248 | 264 | } |
| 249 | 265 | |
| 250 | 266 | void BrCascadeClassifier::writeStages( FileStorage &fs, const Mat& featureMap ) const |
| ... | ... | @@ -285,7 +301,7 @@ void BrCascadeClassifier::save(const string filename) |
| 285 | 301 | |
| 286 | 302 | void BrCascadeClassifier::getUsedFeaturesIdxMap( Mat& featureMap ) |
| 287 | 303 | { |
| 288 | - int varCount = compat->numFeatures(); | |
| 304 | + int varCount = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize(); | |
| 289 | 305 | featureMap.create( 1, varCount, CV_32SC1 ); |
| 290 | 306 | featureMap.setTo(Scalar(-1)); |
| 291 | 307 | |
| ... | ... | @@ -298,4 +314,3 @@ void BrCascadeClassifier::getUsedFeaturesIdxMap( Mat& featureMap ) |
| 298 | 314 | featureMap.ptr<int>(0)[fi] = idx++; |
| 299 | 315 | } |
| 300 | 316 | |
| 301 | - | ... | ... |
openbr/core/cascade.h
| 1 | -#ifndef _CASCADE_H | |
| 2 | -#define _CASCADE_H | |
| 1 | +#ifndef CASCADE_H | |
| 2 | +#define CASCADE_H | |
| 3 | 3 | |
| 4 | 4 | #include <openbr/openbr_plugin.h> |
| 5 | 5 | #include <opencv2/highgui/highgui.hpp> |
| ... | ... | @@ -17,9 +17,6 @@ public: |
| 17 | 17 | bool getNeg(cv::Mat &_img); |
| 18 | 18 | bool getPos(cv::Mat &_img); |
| 19 | 19 | |
| 20 | -private: | |
| 21 | - bool nextNeg(); | |
| 22 | - | |
| 23 | 20 | QList<cv::Mat> posImages, negImages; |
| 24 | 21 | |
| 25 | 22 | int posIdx, negIdx; |
| ... | ... | @@ -31,42 +28,45 @@ private: |
| 31 | 28 | float stepFactor; |
| 32 | 29 | size_t round; |
| 33 | 30 | cv::Size winSize; |
| 31 | + | |
| 32 | +private: | |
| 33 | + bool nextNeg(); | |
| 34 | 34 | }; |
| 35 | 35 | |
| 36 | + | |
| 36 | 37 | class BrCascadeClassifier |
| 37 | 38 | { |
| 38 | 39 | public: |
| 39 | 40 | bool train(const std::string _cascadeDirName, |
| 40 | - const QList<cv::Mat> &_posImages, | |
| 41 | - const QList<cv::Mat> &_negImages, | |
| 42 | - int _precalcValBufSize, int _precalcIdxBufSize, | |
| 43 | - int _numPos, int _numNeg, int _numStages, | |
| 44 | - Representation *_representation, | |
| 45 | - const CascadeBoostParams& _stageParams); | |
| 41 | + const QList<cv::Mat> &_posImages, | |
| 42 | + const QList<cv::Mat> &_negImages, | |
| 43 | + int _numPos, int _numNeg, | |
| 44 | + int _precalcValBufSize, int _precalcIdxBufSize, | |
| 45 | + int _numStages, | |
| 46 | + cv::Size _winSize, | |
| 47 | + const CascadeBoostParams& _stageParams); | |
| 46 | 48 | private: |
| 47 | - int predict( int sampleIdx ); | |
| 49 | + int predict(int sampleIdx); | |
| 48 | 50 | void save(const std::string cascadeDirName); |
| 49 | - bool updateTrainingSet( double& acceptanceRatio ); | |
| 50 | - int fillPassedSamples( int first, int count, bool isPositive, int64& consumed ); | |
| 51 | + bool updateTrainingSet(double& acceptanceRatio); | |
| 52 | + int fillPassedSamples(int first, int count, bool isPositive, int64& consumed); | |
| 51 | 53 | |
| 52 | - void writeParams( cv::FileStorage &fs ) const; | |
| 53 | - void writeStages( cv::FileStorage &fs, const cv::Mat& featureMap ) const; | |
| 54 | - void writeFeatures( cv::FileStorage &fs, const cv::Mat& featureMap ) const; | |
| 54 | + void writeParams(cv::FileStorage &fs) const; | |
| 55 | + void writeStages(cv::FileStorage &fs, const cv::Mat& featureMap) const; | |
| 56 | + void writeFeatures(cv::FileStorage &fs, const cv::Mat& featureMap) const; | |
| 55 | 57 | |
| 56 | - void getUsedFeaturesIdxMap( cv::Mat& featureMap ); | |
| 58 | + void getUsedFeaturesIdxMap(cv::Mat& featureMap); | |
| 57 | 59 | |
| 58 | 60 | cv::Ptr<CascadeBoostParams> stageParams; |
| 59 | 61 | |
| 60 | - cv::Ptr<LBPTrainingEvaluator> featureEvaluator; | |
| 61 | - cv::Ptr<BoostBRCompatibility> compat; | |
| 62 | + cv::Ptr<FeatureEvaluator> featureEvaluator; | |
| 62 | 63 | std::vector< cv::Ptr<CascadeBoost> > stageClassifiers; |
| 63 | 64 | CascadeImageReader imgReader; |
| 64 | - cv::Size winSize; | |
| 65 | 65 | int numStages, curNumSamples; |
| 66 | 66 | int numPos, numNeg; |
| 67 | + cv::Size winSize; | |
| 67 | 68 | }; |
| 68 | 69 | |
| 69 | 70 | } // namespace br |
| 70 | 71 | |
| 71 | -#endif // _CASCADE_H | |
| 72 | - | |
| 72 | +#endif // CASCADE_H | ... | ... |
openbr/core/features.cpp
| 1 | 1 | #include "features.h" |
| 2 | -#include "opencvutils.h" | |
| 3 | 2 | |
| 4 | 3 | using namespace cv; |
| 5 | 4 | using namespace br; |
| 6 | 5 | |
| 7 | -// ------------------------------------ LBP Training ----------------------------------------------- | |
| 6 | +//------------------------------------- FeatureEvaluator --------------------------------------- | |
| 8 | 7 | |
| 9 | -void LBPTrainingEvaluator::init(int _maxSampleCount, Size _winSize) | |
| 8 | +void FeatureEvaluator::init(int _maxSampleCount, Size _winSize ) | |
| 10 | 9 | { |
| 11 | - CV_Assert( _maxSampleCount > 0); | |
| 12 | - sum.create((int)_maxSampleCount, (_winSize.width + 1) * (_winSize.height + 1), CV_32SC1); | |
| 13 | - | |
| 10 | + CV_Assert(_maxSampleCount > 0); | |
| 14 | 11 | winSize = _winSize; |
| 15 | 12 | numFeatures = 0; |
| 16 | - maxCatCount = 256; | |
| 13 | + data.create((int)_maxSampleCount, (_winSize.width + 1) * (_winSize.height + 1), CV_32SC1); | |
| 17 | 14 | cls.create( (int)_maxSampleCount, 1, CV_32FC1 ); |
| 15 | + | |
| 16 | + maxCatCount = 256; | |
| 17 | + featSize = 1; | |
| 18 | + | |
| 18 | 19 | generateFeatures(); |
| 19 | 20 | } |
| 20 | 21 | |
| 21 | -void LBPTrainingEvaluator::setImage(const Mat &img, uchar clsLabel, int idx) | |
| 22 | +void FeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx) | |
| 22 | 23 | { |
| 23 | - CV_DbgAssert( !sum.empty() ); | |
| 24 | + CV_Assert(img.cols == winSize.width); | |
| 25 | + CV_Assert(img.rows == winSize.height); | |
| 26 | + CV_Assert(idx < cls.rows); | |
| 24 | 27 | cls.ptr<float>(idx)[0] = clsLabel; |
| 25 | - Mat innSum(winSize.height + 1, winSize.width + 1, sum.type(), sum.ptr<int>((int)idx)); | |
| 26 | - integral( img, innSum ); | |
| 28 | + Mat integralImg(winSize.height + 1, winSize.width + 1, data.type(), data.ptr<int>(idx)); | |
| 29 | + integral(img, integralImg); | |
| 27 | 30 | } |
| 28 | 31 | |
| 29 | -void LBPTrainingEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const | |
| 32 | +void FeatureEvaluator::writeFeatures(FileStorage &fs, const Mat &featureMap) const | |
| 30 | 33 | { |
| 31 | 34 | _writeFeatures( features, fs, featureMap ); |
| 32 | 35 | } |
| 33 | 36 | |
| 34 | -void LBPTrainingEvaluator::generateFeatures() | |
| 37 | +void FeatureEvaluator::generateFeatures() | |
| 35 | 38 | { |
| 36 | 39 | int offset = winSize.width + 1; |
| 37 | - for( int x = 0; x < winSize.width; x++ ) | |
| 38 | - for( int y = 0; y < winSize.height; y++ ) | |
| 39 | - for( int w = 1; w <= winSize.width / 3; w++ ) | |
| 40 | - for( int h = 1; h <= winSize.height / 3; h++ ) | |
| 41 | - if ( (x+3*w <= winSize.width) && (y+3*h <= winSize.height) ) | |
| 42 | - features.push_back( Feature(offset, x, y, w, h ) ); | |
| 40 | + for (int x = 0; x < winSize.width; x++) | |
| 41 | + for (int y = 0; y < winSize.height; y++) | |
| 42 | + for (int w = 1; w <= winSize.width / 3; w++) | |
| 43 | + for (int h = 1; h <= winSize.height / 3; h++) | |
| 44 | + if ((x+3*w <= winSize.width) && (y+3*h <= winSize.height)) | |
| 45 | + features.push_back(Feature(offset, x, y, w, h )); | |
| 43 | 46 | numFeatures = (int)features.size(); |
| 44 | 47 | } |
| 45 | 48 | |
| 46 | -LBPTrainingEvaluator::Feature::Feature() | |
| 49 | +FeatureEvaluator::Feature::Feature() | |
| 47 | 50 | { |
| 48 | 51 | rect = cvRect(0, 0, 0, 0); |
| 49 | 52 | } |
| 50 | 53 | |
| 51 | -LBPTrainingEvaluator::Feature::Feature( int offset, int x, int y, int _blockWidth, int _blockHeight ) | |
| 54 | +FeatureEvaluator::Feature::Feature( int offset, int x, int y, int _blockWidth, int _blockHeight ) | |
| 52 | 55 | { |
| 53 | 56 | Rect tr = rect = cvRect(x, y, _blockWidth, _blockHeight); |
| 54 | 57 | CV_SUM_OFFSETS( p[0], p[1], p[4], p[5], tr, offset ) |
| ... | ... | @@ -60,7 +63,7 @@ LBPTrainingEvaluator::Feature::Feature( int offset, int x, int y, int _blockWidt |
| 60 | 63 | CV_SUM_OFFSETS( p[8], p[9], p[12], p[13], tr, offset ) |
| 61 | 64 | } |
| 62 | 65 | |
| 63 | -void LBPTrainingEvaluator::Feature::write(FileStorage &fs) const | |
| 66 | +void FeatureEvaluator::Feature::write(FileStorage &fs) const | |
| 64 | 67 | { |
| 65 | 68 | fs << CC_RECT << "[:" << rect.x << rect.y << rect.width << rect.height << "]"; |
| 66 | 69 | } | ... | ... |
openbr/core/features.h
| ... | ... | @@ -27,7 +27,7 @@ |
| 27 | 27 | #define CC_MINHITRATE "minHitRate" |
| 28 | 28 | #define CC_MAXFALSEALARM "maxFalseAlarm" |
| 29 | 29 | #define CC_TRIM_RATE "weightTrimRate" |
| 30 | -#define CC_MAX_DEPTH "maxDepth" | |
| 30 | +#define CC_MAX_DEPTH "maxDepth" |
| 31 | 31 | #define CC_WEAK_COUNT "maxWeakCount" |
| 32 | 32 | #define CC_STAGE_THRESHOLD "stageThreshold" |
| 33 | 33 | #define CC_WEAK_CLASSIFIERS "weakClassifiers" |
| ... | ... | @@ -39,24 +39,9 @@ |
| 39 | 39 | #define CC_MAX_CAT_COUNT "maxCatCount" |
| 40 | 40 | #define CC_FEATURE_SIZE "featSize" |
| 41 | 41 | |
| 42 | -#define CC_HAAR "HAAR" | |
| 43 | -#define CC_MODE "mode" | |
| 44 | -#define CC_MODE_BASIC "BASIC" | |
| 45 | -#define CC_MODE_CORE "CORE" | |
| 46 | -#define CC_MODE_ALL "ALL" | |
| 47 | -#define CC_RECTS "rects" | |
| 48 | -#define CC_TILTED "tilted" | |
| 49 | - | |
| 50 | 42 | #define CC_LBP "LBP" |
| 51 | 43 | #define CC_RECT "rect" |
| 52 | 44 | |
| 53 | -#define CC_HOG "HOG" | |
| 54 | -#define CC_HOGMULTI "HOGMulti" | |
| 55 | - | |
| 56 | -#define CC_NPD "NPD" | |
| 57 | -#define CC_POINTS "points" | |
| 58 | -#define CC_POINT "point" | |
| 59 | - | |
| 60 | 45 | #ifdef _WIN32 |
| 61 | 46 | #define TIME( arg ) (((double) clock()) / CLOCKS_PER_SEC) |
| 62 | 47 | #else |
| ... | ... | @@ -73,43 +58,6 @@ |
| 73 | 58 | /* (x + w, y + h) */ \ |
| 74 | 59 | (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height); |
| 75 | 60 | |
| 76 | -#define CV_TILTED_OFFSETS( p0, p1, p2, p3, rect, step ) \ | |
| 77 | - /* (x, y) */ \ | |
| 78 | - (p0) = (rect).x + (step) * (rect).y; \ | |
| 79 | - /* (x - h, y + h) */ \ | |
| 80 | - (p1) = (rect).x - (rect).height + (step) * ((rect).y + (rect).height);\ | |
| 81 | - /* (x + w, y + w) */ \ | |
| 82 | - (p2) = (rect).x + (rect).width + (step) * ((rect).y + (rect).width); \ | |
| 83 | - /* (x + w - h, y + w + h) */ \ | |
| 84 | - (p3) = (rect).x + (rect).width - (rect).height \ | |
| 85 | - + (step) * ((rect).y + (rect).width + (rect).height); | |
| 86 | - | |
| 87 | -#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \ | |
| 88 | - /* (x, y) */ \ | |
| 89 | - (p0) = sum + (rect).x + (step) * (rect).y, \ | |
| 90 | - /* (x + w, y) */ \ | |
| 91 | - (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \ | |
| 92 | - /* (x, y + h) */ \ | |
| 93 | - (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \ | |
| 94 | - /* (x + w, y + h) */ \ | |
| 95 | - (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height) | |
| 96 | - | |
| 97 | -#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step ) \ | |
| 98 | - /* (x, y) */ \ | |
| 99 | - (p0) = tilted + (rect).x + (step) * (rect).y, \ | |
| 100 | - /* (x - h, y + h) */ \ | |
| 101 | - (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \ | |
| 102 | - /* (x + w, y + w) */ \ | |
| 103 | - (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \ | |
| 104 | - /* (x + w - h, y + w + h) */ \ | |
| 105 | - (p3) = tilted + (rect).x + (rect).width - (rect).height \ | |
| 106 | - + (step) * ((rect).y + (rect).width + (rect).height) | |
| 107 | - | |
| 108 | -#define CALC_SUM_(p0, p1, p2, p3, offset) \ | |
| 109 | - ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset]) | |
| 110 | - | |
| 111 | -#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset) | |
| 112 | - | |
| 113 | 61 | namespace br |
| 114 | 62 | { |
| 115 | 63 | |
| ... | ... | @@ -128,32 +76,30 @@ void _writeFeatures( const std::vector<Feature> features, cv::FileStorage &fs, c |
| 128 | 76 | fs << "]"; |
| 129 | 77 | } |
| 130 | 78 | |
| 131 | -//------------------------- LBP Feature --------------------------------- | |
| 132 | - | |
| 133 | -class LBPTrainingEvaluator | |
| 79 | +class FeatureEvaluator | |
| 134 | 80 | { |
| 135 | 81 | public: |
| 136 | - virtual ~LBPTrainingEvaluator() {} | |
| 137 | - virtual void init(int _maxSampleCount, cv::Size _winSize ); | |
| 138 | - virtual void setImage(const cv::Mat& img, uchar clsLabel, int idx); | |
| 139 | - virtual float operator()(int featureIdx, int sampleIdx) const { return (float)features[featureIdx].calc( sum, sampleIdx); } | |
| 140 | - virtual void writeFeatures( cv::FileStorage &fs, const cv::Mat& featureMap ) const; | |
| 82 | + ~FeatureEvaluator() {} | |
| 83 | + void init(int _maxSampleCount, cv::Size _winSize); | |
| 84 | + void setImage(const cv::Mat& img, uchar clsLabel, int idx); | |
| 85 | + void writeFeatures(cv::FileStorage &fs, const cv::Mat& featureMap) const; | |
| 86 | + float operator()(int featureIdx, int sampleIdx) const { return (float)features[featureIdx].calc(data, sampleIdx); } | |
| 141 | 87 | |
| 142 | 88 | int getNumFeatures() const { return numFeatures; } |
| 143 | 89 | int getMaxCatCount() const { return maxCatCount; } |
| 144 | - int getFeatureSize() const { return 1; } | |
| 90 | + int getFeatureSize() const { return featSize; } | |
| 145 | 91 | const cv::Mat& getCls() const { return cls; } |
| 146 | 92 | float getCls(int si) const { return cls.at<float>(si, 0); } |
| 147 | 93 | |
| 148 | 94 | protected: |
| 149 | - virtual void generateFeatures(); | |
| 95 | + void generateFeatures(); | |
| 150 | 96 | |
| 151 | 97 | class Feature |
| 152 | 98 | { |
| 153 | 99 | public: |
| 154 | 100 | Feature(); |
| 155 | 101 | Feature( int offset, int x, int y, int _block_w, int _block_h ); |
| 156 | - uchar calc( const cv::Mat& _sum, size_t y ) const; | |
| 102 | + uchar calc( const cv::Mat &data, int y ) const; | |
| 157 | 103 | void write( cv::FileStorage &fs ) const; |
| 158 | 104 | |
| 159 | 105 | cv::Rect rect; |
| ... | ... | @@ -161,26 +107,29 @@ protected: |
| 161 | 107 | }; |
| 162 | 108 | std::vector<Feature> features; |
| 163 | 109 | |
| 110 | + cv::Mat data, cls; | |
| 111 | + | |
| 164 | 112 | int npos, nneg; |
| 165 | - int maxCatCount; | |
| 166 | 113 | int numFeatures; |
| 114 | + int maxCatCount; // 0 in case of numerical features | |
| 115 | + int featSize; // 1 in case of simple features (HAAR, LBP) and N_BINS(9)*N_CELLS(4) in case of Dalal's HOG features | |
| 116 | + | |
| 167 | 117 | cv::Size winSize; |
| 168 | - cv::Mat cls, sum; | |
| 169 | 118 | }; |
| 170 | 119 | |
| 171 | -inline uchar LBPTrainingEvaluator::Feature::calc(const cv::Mat &_sum, size_t y) const | |
| 120 | +inline uchar FeatureEvaluator::Feature::calc(const cv::Mat &data, int y) const | |
| 172 | 121 | { |
| 173 | - const int* psum = _sum.ptr<int>((int)y); | |
| 174 | - int cval = psum[p[5]] - psum[p[6]] - psum[p[9]] + psum[p[10]]; | |
| 175 | - | |
| 176 | - return (uchar)((psum[p[0]] - psum[p[1]] - psum[p[4]] + psum[p[5]] >= cval ? 128 : 0) | // 0 | |
| 177 | - (psum[p[1]] - psum[p[2]] - psum[p[5]] + psum[p[6]] >= cval ? 64 : 0) | // 1 | |
| 178 | - (psum[p[2]] - psum[p[3]] - psum[p[6]] + psum[p[7]] >= cval ? 32 : 0) | // 2 | |
| 179 | - (psum[p[6]] - psum[p[7]] - psum[p[10]] + psum[p[11]] >= cval ? 16 : 0) | // 5 | |
| 180 | - (psum[p[10]] - psum[p[11]] - psum[p[14]] + psum[p[15]] >= cval ? 8 : 0) | // 8 | |
| 181 | - (psum[p[9]] - psum[p[10]] - psum[p[13]] + psum[p[14]] >= cval ? 4 : 0) | // 7 | |
| 182 | - (psum[p[8]] - psum[p[9]] - psum[p[12]] + psum[p[13]] >= cval ? 2 : 0) | // 6 | |
| 183 | - (psum[p[4]] - psum[p[5]] - psum[p[8]] + psum[p[9]] >= cval ? 1 : 0)); // 3 | |
| 122 | + const int* ptr = data.ptr<int>(y); | |
| 123 | + int cval = ptr[p[5]] - ptr[p[6]] - ptr[p[9]] + ptr[p[10]]; | |
| 124 | + | |
| 125 | + return (uchar)((ptr[p[0]] - ptr[p[1]] - ptr[p[4]] + ptr[p[5]] >= cval ? 128 : 0) | // 0 | |
| 126 | + (ptr[p[1]] - ptr[p[2]] - ptr[p[5]] + ptr[p[6]] >= cval ? 64 : 0) | // 1 | |
| 127 | + (ptr[p[2]] - ptr[p[3]] - ptr[p[6]] + ptr[p[7]] >= cval ? 32 : 0) | // 2 | |
| 128 | + (ptr[p[6]] - ptr[p[7]] - ptr[p[10]] + ptr[p[11]] >= cval ? 16 : 0) | // 5 | |
| 129 | + (ptr[p[10]] - ptr[p[11]] - ptr[p[14]] + ptr[p[15]] >= cval ? 8 : 0) | // 8 | |
| 130 | + (ptr[p[9]] - ptr[p[10]] - ptr[p[13]] + ptr[p[14]] >= cval ? 4 : 0) | // 7 | |
| 131 | + (ptr[p[8]] - ptr[p[9]] - ptr[p[12]] + ptr[p[13]] >= cval ? 2 : 0) | // 6 | |
| 132 | + (ptr[p[4]] - ptr[p[5]] - ptr[p[8]] + ptr[p[9]] >= cval ? 1 : 0)); // 3 | |
| 184 | 133 | } |
| 185 | 134 | |
| 186 | 135 | } // namespace br | ... | ... |
openbr/plugins/metadata/cascade.cpp
| ... | ... | @@ -80,20 +80,18 @@ class CascadeTransform : public MetaTransform |
| 80 | 80 | Q_PROPERTY(int winHeight READ get_winHeight WRITE set_winHeight RESET reset_winHeight STORED false) |
| 81 | 81 | Q_PROPERTY(int numPos READ get_numPos WRITE set_numPos RESET reset_numPos STORED false) |
| 82 | 82 | Q_PROPERTY(int numNeg READ get_numNeg WRITE set_numNeg RESET reset_numNeg STORED false) |
| 83 | - Q_PROPERTY(int numStages READ get_numStages WRITE set_numStages RESET reset_numStages STORED false) | |
| 84 | 83 | |
| 85 | 84 | BR_PROPERTY(QString, model, "FrontalFace") |
| 86 | 85 | BR_PROPERTY(int, minSize, 64) |
| 87 | 86 | BR_PROPERTY(int, minNeighbors, 5) |
| 88 | 87 | BR_PROPERTY(bool, ROCMode, false) |
| 89 | 88 | |
| 90 | - BR_PROPERTY(QString, vecFile, "vec.vec") | |
| 89 | + BR_PROPERTY(QString, vecFile, "data.vec") | |
| 91 | 90 | BR_PROPERTY(QString, negFile, "neg.txt") |
| 92 | 91 | BR_PROPERTY(int, winWidth, 24) |
| 93 | 92 | BR_PROPERTY(int, winHeight, 24) |
| 94 | 93 | BR_PROPERTY(int, numPos, 1000) |
| 95 | 94 | BR_PROPERTY(int, numNeg, 1000) |
| 96 | - BR_PROPERTY(int, numStages, 20) | |
| 97 | 95 | |
| 98 | 96 | Resource<CascadeClassifier> cascadeResource; |
| 99 | 97 | |
| ... | ... | @@ -180,22 +178,21 @@ class CascadeTransform : public MetaTransform |
| 180 | 178 | // Train transform |
| 181 | 179 | void train(const TemplateList& data) |
| 182 | 180 | { |
| 183 | - (void) data; | |
| 181 | + (void)data; | |
| 182 | + | |
| 183 | + BrCascadeClassifier classifier; | |
| 184 | 184 | |
| 185 | 185 | QList<Mat> posImages = getPos(); |
| 186 | 186 | QList<Mat> negImages = getNeg(); |
| 187 | 187 | |
| 188 | - BrCascadeClassifier classifier; | |
| 189 | - | |
| 190 | 188 | CascadeBoostParams stageParams(CvBoost::GENTLE, 0.999, 0.5, 0.95, 1, 200); |
| 191 | 189 | |
| 192 | - Representation *representation = Representation::make("MBLBP(24,24)", NULL); | |
| 193 | - | |
| 194 | 190 | QString cascadeDir = Globals->sdkPath + "/share/openbr/models/openbrcascades/" + model; |
| 195 | 191 | classifier.train(cascadeDir.toStdString(), |
| 196 | 192 | posImages, negImages, |
| 197 | - 1024, 1024, numPos, numNeg, numStages, | |
| 198 | - representation, stageParams); | |
| 193 | + numPos, numNeg, 1024, 1024, 12, | |
| 194 | + Size(winWidth, winHeight), | |
| 195 | + stageParams); | |
| 199 | 196 | } |
| 200 | 197 | |
| 201 | 198 | void project(const Template &src, Template &dst) const | ... | ... |
openbr/plugins/representation/mblbp.cpp
| 1 | 1 | #include <opencv2/imgproc/imgproc.hpp> |
| 2 | 2 | |
| 3 | 3 | #include <openbr/plugins/openbr_internal.h> |
| 4 | +#include <openbr/core/opencvutils.h> | |
| 4 | 5 | |
| 5 | 6 | using namespace cv; |
| 6 | 7 | |
| ... | ... | @@ -29,12 +30,12 @@ class MBLBPRepresentation : public Representation |
| 29 | 30 | void init() |
| 30 | 31 | { |
| 31 | 32 | int offset = winWidth + 1; |
| 32 | - for( int x = 0; x < winWidth; x++ ) | |
| 33 | - for( int y = 0; y < winHeight; y++ ) | |
| 34 | - for( int w = 1; w <= winWidth / 3; w++ ) | |
| 35 | - for( int h = 1; h <= winHeight / 3; h++ ) | |
| 36 | - if ( (x+3*w <= winWidth) && (y+3*h <= winHeight) ) | |
| 37 | - features.push_back( Feature(offset, x, y, w, h ) ); | |
| 33 | + for (int x = 0; x < winWidth; x++ ) | |
| 34 | + for (int y = 0; y < winHeight; y++ ) | |
| 35 | + for (int w = 1; w <= winWidth / 3; w++ ) | |
| 36 | + for (int h = 1; h <= winHeight / 3; h++ ) | |
| 37 | + if ((x+3*w <= winWidth) && (y+3*h <= winHeight) ) | |
| 38 | + features.append(Feature(offset, x, y, w, h ) ); | |
| 38 | 39 | } |
| 39 | 40 | |
| 40 | 41 | Mat preprocess(const Mat &image) const | ... | ... |