#include "cascadeclassifier.h"

static inline double logRatio( double val )
{
    const double eps = 1e-5;

    val = max( val, eps );
    val = min( val, 1. - eps );
    return log( val/(1. - val) );
}
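// Worked example (illustrative, not called anywhere): logRatio maps a
// probability-like value to its log-odds, clamping to [eps, 1-eps] so that
// rates of exactly 0 or 1 stay finite:
//   logRatio( 0.5   ) = log( 0.5/0.5 )        =  0
//   logRatio( 0.995 ) = log( 0.995/0.005 )    ~ +5.29
//   logRatio( 1.0   ) = log( (1-1e-5)/1e-5 )  ~ +11.51   (clamped)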
#define CV_CMP_FLT(i,j) (i < j)
static CV_IMPLEMENT_QSORT_EX( icvSortFlt, float, CV_CMP_FLT, const float* )

#define CV_CMP_NUM_IDX(i,j) (aux[i] < aux[j])
static CV_IMPLEMENT_QSORT_EX( icvSortIntAux, int, CV_CMP_NUM_IDX, const float* )
static CV_IMPLEMENT_QSORT_EX( icvSortUShAux, unsigned short, CV_CMP_NUM_IDX, const float* )

#define CV_THRESHOLD_EPS (0.00001F)

static const int MinBlockSize = 1 << 16;
static const int BlockSizeDelta = 1 << 10;
//----------------------------- CascadeBoostParams -------------------------------------------------

CvCascadeBoostParams::CvCascadeBoostParams() : minHitRate( 0.995F ), maxFalseAlarm( 0.5F )
{
    boost_type = CvBoost::GENTLE;
    use_surrogates = use_1se_rule = truncate_pruned_tree = false;
}
CvCascadeBoostParams::CvCascadeBoostParams( int _boostType,
        float _minHitRate, float _maxFalseAlarm,
        double _weightTrimRate, int _maxDepth, int _maxWeakCount ) :
    CvBoostParams( _boostType, _maxWeakCount, _weightTrimRate, _maxDepth, false, 0 )
{
    boost_type = CvBoost::GENTLE;
    minHitRate = _minHitRate;
    maxFalseAlarm = _maxFalseAlarm;
    use_surrogates = use_1se_rule = truncate_pruned_tree = false;
}
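// A minimal usage sketch (hypothetical caller; the literals are arbitrary
// examples, not recommended settings). Note that both constructors force
// boost_type to CvBoost::GENTLE, so the _boostType argument only reaches the
// CvBoostParams base class.
#if 0
    CvCascadeBoostParams stageParams( CvBoost::GENTLE,
                                      0.995F, /* minHitRate: keep >= 99.5% of positives per stage */
                                      0.5F,   /* maxFalseAlarm: reject >= 50% of negatives per stage */
                                      0.95,   /* weight_trim_rate */
                                      1,      /* max_depth: stumps */
                                      100 );  /* max weak trees per stage */
#endif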
void CvCascadeBoostParams::write( FileStorage &fs ) const
{
    String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
                          boost_type == CvBoost::REAL ? CC_REAL_BOOST :
                          boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
                          boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
    CV_Assert( !boostTypeStr.empty() );
    fs << CC_BOOST_TYPE << boostTypeStr;
    fs << CC_MINHITRATE << minHitRate;
    fs << CC_MAXFALSEALARM << maxFalseAlarm;
    fs << CC_TRIM_RATE << weight_trim_rate;
    fs << CC_MAX_DEPTH << max_depth;
    fs << CC_WEAK_COUNT << weak_count;
}
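// For reference, write() produces a mapping roughly like the following (YAML
// form; the actual key names are whatever the CC_* macros in cascadeclassifier.h
// expand to, so the strings below are assumptions):
//   boostType: GAB
//   minHitRate: 0.995
//   maxFalseAlarmRate: 0.5
//   weightTrimRate: 0.95
//   maxDepth: 1
//   maxWeakCount: 100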
bool CvCascadeBoostParams::read( const FileNode &node )
{
    String boostTypeStr;
    FileNode rnode = node[CC_BOOST_TYPE];
    rnode >> boostTypeStr;
    boost_type = !boostTypeStr.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
                 !boostTypeStr.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
                 !boostTypeStr.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
                 !boostTypeStr.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
    if ( boost_type == -1 )
        CV_Error( CV_StsBadArg, "unsupported Boost type" );
    node[CC_MINHITRATE] >> minHitRate;
    node[CC_MAXFALSEALARM] >> maxFalseAlarm;
    node[CC_TRIM_RATE] >> weight_trim_rate;
    node[CC_MAX_DEPTH] >> max_depth;
    node[CC_WEAK_COUNT] >> weak_count;
    if ( minHitRate <= 0 || minHitRate > 1 ||
         maxFalseAlarm <= 0 || maxFalseAlarm > 1 ||
         weight_trim_rate <= 0 || weight_trim_rate > 1 ||
         max_depth <= 0 || weak_count <= 0 )
        CV_Error( CV_StsBadArg, "bad parameters range");
    return true;
}
void CvCascadeBoostParams::printDefaults() const
{
    cout << "--boostParams--" << endl;
    cout << " [-bt <{" << CC_DISCRETE_BOOST << ", "
                       << CC_REAL_BOOST << ", "
                       << CC_LOGIT_BOOST << ", "
                       << CC_GENTLE_BOOST << "(default)}>]" << endl;
    cout << " [-minHitRate <min_hit_rate = " << minHitRate << ">]" << endl;
    cout << " [-maxFalseAlarmRate <max_false_alarm_rate = " << maxFalseAlarm << ">]" << endl;
    cout << " [-weightTrimRate <weight_trim_rate = " << weight_trim_rate << ">]" << endl;
    cout << " [-maxDepth <max_depth_of_weak_tree = " << max_depth << ">]" << endl;
    cout << " [-maxWeakCount <max_weak_tree_count = " << weak_count << ">]" << endl;
}
void CvCascadeBoostParams::printAttrs() const
{
    String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
                          boost_type == CvBoost::REAL ? CC_REAL_BOOST :
                          boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
                          boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
    CV_Assert( !boostTypeStr.empty() );
    cout << "boostType: " << boostTypeStr << endl;
    cout << "minHitRate: " << minHitRate << endl;
    cout << "maxFalseAlarmRate: " << maxFalseAlarm << endl;
    cout << "weightTrimRate: " << weight_trim_rate << endl;
    cout << "maxTreeDepth: " << max_depth << endl;
    cout << "maxWeakCount: " << weak_count << endl;
}
bool CvCascadeBoostParams::scanAttr( const String prmName, const String val )
{
    bool res = true;

    if( !prmName.compare( "-bt" ) )
    {
        boost_type = !val.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
                     !val.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
                     !val.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
                     !val.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
        if (boost_type == -1)
            res = false;
    }
    else if( !prmName.compare( "-minHitRate" ) )
    {
        minHitRate = (float) atof( val.c_str() );
    }
    else if( !prmName.compare( "-maxFalseAlarmRate" ) )
    {
        maxFalseAlarm = (float) atof( val.c_str() ); // was assigning weight_trim_rate: wrong member for this flag
    }
    else if( !prmName.compare( "-weightTrimRate" ) )
    {
        weight_trim_rate = (float) atof( val.c_str() );
    }
    else if( !prmName.compare( "-maxDepth" ) )
    {
        max_depth = atoi( val.c_str() );
    }
    else if( !prmName.compare( "-maxWeakCount" ) )
    {
        weak_count = atoi( val.c_str() );
    }
    else
        res = false;

    return res;
}
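// Sketch of how scanAttr is typically driven (hypothetical caller; the real
// argument loop lives in the traincascade application): each "-name value"
// pair from argv is offered to the parameter object, which returns false for
// names or values it does not accept.
#if 0
    CvCascadeBoostParams stageParams;
    for( int i = 1; i + 1 < argc; i += 2 )
        if( !stageParams.scanAttr( argv[i], argv[i+1] ) )
            cerr << "unknown or bad option: " << argv[i] << endl;
#endif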
//---------------------------- CascadeBoostTrainData -----------------------------

CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
                                                  const CvDTreeParams& _params )
{
    is_classifier = true;
    var_all = var_count = (int)_featureEvaluator->getNumFeatures();

    featureEvaluator = _featureEvaluator;
    shared = true;
    set_params( _params );
    max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
    var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
    if ( featureEvaluator->getMaxCatCount() > 0 )
    {
        numPrecalcIdx = 0;
        cat_var_count = var_count;
        ord_var_count = 0;
        for( int vi = 0; vi < var_count; vi++ )
        {
            var_type->data.i[vi] = vi;
        }
    }
    else
    {
        cat_var_count = 0;
        ord_var_count = var_count;
        for( int vi = 1; vi <= var_count; vi++ )
        {
            var_type->data.i[vi-1] = -vi;
        }
    }
    var_type->data.i[var_count] = cat_var_count;
    var_type->data.i[var_count+1] = cat_var_count+1;

    int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) + (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
    int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
    treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
    tree_storage = cvCreateMemStorage( treeBlockSize );
    node_heap = cvCreateSet( 0, sizeof(node_heap[0]), sizeof(CvDTreeNode), tree_storage );
    split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage );
}
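// var_type encoding used above (inherited from CvDTreeTrainData): entry vi holds
// vi for a categorical variable and -vi-1 for an ordered one, so the base class
// can recover both the kind and the index of a variable. The two trailing
// entries describe the (categorical) response:
//   categorical features: var_type = [ 0, 1, ..., var_count-1, cat_var_count, cat_var_count+1 ]
//   ordered features:     var_type = [ -1, -2, ..., -var_count, 0, 1 ]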
CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
                                                  int _numSamples,
                                                  int _precalcValBufSize, int _precalcIdxBufSize,
                                                  const CvDTreeParams& _params )
{
    setData( _featureEvaluator, _numSamples, _precalcValBufSize, _precalcIdxBufSize, _params );
}
void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluator,
                                       int _numSamples,
                                       int _precalcValBufSize, int _precalcIdxBufSize,
                                       const CvDTreeParams& _params )
{
    int* idst = 0;
    unsigned short* udst = 0;

    clear();
    shared = true;
    is_classifier = true;

    set_params( _params );

    CV_Assert( _featureEvaluator );
    featureEvaluator = _featureEvaluator;

    max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
    _resp = featureEvaluator->getCls();
    responses = &_resp;
    // TODO: check responses: elements must be 0 or 1

    if( _precalcValBufSize < 0 || _precalcIdxBufSize < 0)
        CV_Error( CV_StsOutOfRange, "_numPrecalcVal and _numPrecalcIdx must be positive or 0" );

    var_count = var_all = featureEvaluator->getNumFeatures();
    sample_count = _numSamples;

    is_buf_16u = false;
    if (sample_count < 65536)
        is_buf_16u = true;

    numPrecalcVal = min( (_precalcValBufSize*1048576) / int(sizeof(float)*sample_count), var_count );
    numPrecalcIdx = min( (_precalcIdxBufSize*1048576) /
                int((is_buf_16u ? sizeof(unsigned short) : sizeof (int))*sample_count), var_count );

    int maxNumThreads = 1;
#ifdef _OPENMP
    maxNumThreads = cv::getNumThreads();
#endif
    valCache.create( max(numPrecalcVal, maxNumThreads), sample_count, CV_32FC1 );
    var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );

    if ( featureEvaluator->getMaxCatCount() > 0 )
    {
        numPrecalcIdx = 0;
        cat_var_count = var_count;
        ord_var_count = 0;
        for( int vi = 0; vi < var_count; vi++ )
        {
            var_type->data.i[vi] = vi;
        }
    }
    else
    {
        cat_var_count = 0;
        ord_var_count = var_count;
        for( int vi = 1; vi <= var_count; vi++ )
        {
            var_type->data.i[vi-1] = -vi;
        }
    }
    var_type->data.i[var_count] = cat_var_count;
    var_type->data.i[var_count+1] = cat_var_count+1;
    work_var_count = ( cat_var_count ? var_count : numPrecalcIdx ) + 1;
    buf_size = (work_var_count + 1) * sample_count;
    buf_count = 2;
    if ( is_buf_16u )
        buf = cvCreateMat( buf_count, buf_size, CV_16UC1 );
    else
        buf = cvCreateMat( buf_count, buf_size, CV_32SC1 );

    cat_count = cvCreateMat( 1, cat_var_count + 1, CV_32SC1 );
    pred_float_buf.resize(maxNumThreads);
    pred_int_buf.resize(maxNumThreads);
    resp_float_buf.resize(maxNumThreads);
    resp_int_buf.resize(maxNumThreads);
    cv_lables_buf.resize(maxNumThreads);
    sample_idx_buf.resize(maxNumThreads);
    for( int ti = 0; ti < maxNumThreads; ti++ )
    {
        pred_float_buf[ti].resize(sample_count);
        pred_int_buf[ti].resize(sample_count);
        resp_float_buf[ti].resize(sample_count);
        resp_int_buf[ti].resize(sample_count);
        cv_lables_buf[ti].resize(sample_count);
        sample_idx_buf[ti].resize(sample_count);
    }

    // precalculate valCache and set indices in buf
    precalculate();

    // now calculate the maximum size of split,
    // create memory storage that will keep nodes and splits of the decision tree
    // allocate root node and the buffer for the whole training data
    int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
        (MAX(0,sample_count - 33)/32)*sizeof(int),sizeof(void*));
    int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
    treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
    tree_storage = cvCreateMemStorage( treeBlockSize );
    node_heap = cvCreateSet( 0, sizeof(*node_heap), sizeof(CvDTreeNode), tree_storage );

    int nvSize = var_count*sizeof(int);
    nvSize = cvAlign(MAX( nvSize, (int)sizeof(CvSetElem) ), sizeof(void*));
    int tempBlockSize = nvSize;
    tempBlockSize = MAX( tempBlockSize + BlockSizeDelta, MinBlockSize );
    temp_storage = cvCreateMemStorage( tempBlockSize );
    nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nvSize, temp_storage );

    data_root = new_node( 0, sample_count, 0, 0 );

    if (is_buf_16u)
        udst = (unsigned short*)(buf->data.s + work_var_count*sample_count);
    else
        idst = buf->data.i + work_var_count*sample_count;

    for (int si = 0; si < sample_count; si++)
    {
        if (is_buf_16u)
            udst[si] = (unsigned short)si;
        else
            idst[si] = si;
    }
    for( int vi = 0; vi < var_count; vi++ )
        data_root->set_num_valid(vi, sample_count);
    for( int vi = 0; vi < cat_var_count; vi++ )
        cat_count->data.i[vi] = max_c_count;

    cat_count->data.i[cat_var_count] = 2;

    maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
        (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
    split_heap = cvCreateSet( 0, sizeof(*split_heap), maxSplitSize, tree_storage );

    priors = cvCreateMat( 1, get_num_classes(), CV_64F );
    cvSet(priors, cvScalar(1));
    priors_mult = cvCloneMat( priors );
    counts = cvCreateMat( 1, get_num_classes(), CV_32SC1 );
    direction = cvCreateMat( 1, sample_count, CV_8UC1 );
    split_buf = cvCreateMat( 1, sample_count, CV_32SC1 );
}
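// Layout of the shared 'buf' matrix set up above (buf_count rows, one per
// buffer generation). Each row is divided into work_var_count+1 stripes of
// sample_count elements:
//   stripe vi < numPrecalcIdx : sample indices sorted by ordered feature vi
//   stripe work_var_count-1   : cv_labels
//   stripe work_var_count     : sample indices of the node
// so the elements of stripe vi for node n start at
//   buf->data.i + n->buf_idx*buf->cols + vi*sample_count + n->offset
// (or the analogous unsigned short address when is_buf_16u is set).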
void CvCascadeBoostTrainData::free_train_data()
{
    CvDTreeTrainData::free_train_data();
}
void CvCascadeBoostTrainData::get_class_labels( CvDTreeNode* n, int* labelsBuf, const int** labels )
{
    int nodeSampleCount = n->sample_count;
    int* sampleIndicesBuf = get_sample_idx_buf();
    const int* sampleIndices = 0;
    int rStep = CV_IS_MAT_CONT( responses->type ) ? 1 : responses->step / CV_ELEM_SIZE( responses->type );

    get_sample_indices(n, sampleIndicesBuf, &sampleIndices);

    for( int si = 0; si < nodeSampleCount; si++ )
    {
        int sidx = sampleIndices[si];
        labelsBuf[si] = (int)responses->data.fl[sidx*rStep];
    }
    *labels = labelsBuf;
}
void CvCascadeBoostTrainData::get_sample_indices( CvDTreeNode* n, int* indicesBuf, const int** indices )
{
    CvDTreeTrainData::get_cat_var_data( n, get_work_var_count(), indicesBuf, indices );
}

void CvCascadeBoostTrainData::get_cv_labels( CvDTreeNode* n, int* labels_buf, const int** labels )
{
    CvDTreeTrainData::get_cat_var_data( n, get_work_var_count() - 1, labels_buf, labels );
}
int CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* indicesBuf,
                                               const float** ordValues, const int** indices )
{
    int nodeSampleCount = n->sample_count;
    int* sampleIndicesBuf = get_sample_idx_buf();
    const int* sampleIndices = 0;
    get_sample_indices(n, sampleIndicesBuf, &sampleIndices);

    if ( vi < numPrecalcIdx )
    {
        if( !is_buf_16u )
            *indices = buf->data.i + n->buf_idx*buf->cols + vi*sample_count + n->offset;
        else
        {
            const unsigned short* shortIndices = (const unsigned short*)(buf->data.s + n->buf_idx*buf->cols +
                                                                         vi*sample_count + n->offset );
            for( int i = 0; i < nodeSampleCount; i++ )
                indicesBuf[i] = shortIndices[i];
            *indices = indicesBuf;
        }

        if ( vi < numPrecalcVal )
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                int idx = (*indices)[i];
                idx = sampleIndices[idx];
                ordValuesBuf[i] = valCache.at<float>( vi, idx);
            }
        }
        else
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                int idx = (*indices)[i];
                idx = sampleIndices[idx];
                ordValuesBuf[i] = (*featureEvaluator)( vi, idx);
            }
        }
    }
    else // vi >= numPrecalcIdx
    {
        // use sample_indices as temporary buffer for values
        if ( vi < numPrecalcVal )
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                indicesBuf[i] = i;
                ((float*)sampleIndices)[i] = valCache.at<float>( vi, sampleIndices[i] );
            }
        }
        else
        {
            for( int i = 0; i < nodeSampleCount; i++ )
            {
                indicesBuf[i] = i;
                ((float*)sampleIndices)[i] = (*featureEvaluator)( vi, sampleIndices[i]);
            }
        }
        // sort only the node's samples; entries of indicesBuf beyond
        // nodeSampleCount are uninitialized (was sample_count)
        icvSortIntAux( indicesBuf, nodeSampleCount, (float *)sampleIndices );
        for( int i = 0; i < nodeSampleCount; i++ )
            ordValuesBuf[i] = ((float*)sampleIndices)[indicesBuf[i]];
        *indices = indicesBuf;
    }

    *ordValues = ordValuesBuf;
    return 0;
}
int CvCascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf, const int** catValues )
{
    int nodeSampleCount = n->sample_count;
    int* sampleIndicesBuf = get_sample_idx_buf();
    const int* sampleIndices = 0;
    get_sample_indices(n, sampleIndicesBuf, &sampleIndices);

    if ( vi < numPrecalcVal )
    {
        for( int i = 0; i < nodeSampleCount; i++ )
            catValuesBuf[i] = (int) valCache.at<float>( vi, sampleIndices[i]);
    }
    else
    {
        for( int i = 0; i < nodeSampleCount; i++ )
            catValuesBuf[i] = (int)(*featureEvaluator)( vi, sampleIndices[i] );
    }
    *catValues = catValuesBuf;
    return 0;
}
float CvCascadeBoostTrainData::getVarValue( int vi, int si )
{
    if ( vi < numPrecalcVal && !valCache.empty() )
        return valCache.at<float>( vi, si );
    return (*featureEvaluator)( vi, si );
}
void CvCascadeBoostTrainData::precalculate()
{
    int minNum = MIN( numPrecalcVal, numPrecalcIdx);
    unsigned short* udst = (unsigned short*)buf->data.s;
    int* idst = buf->data.i;

    CV_DbgAssert( !valCache.empty() );
    double proctime = -TIME( 0 );

    int maxNumThreads = cv::getNumThreads();
#pragma omp parallel for num_threads(maxNumThreads) schedule(dynamic)
    for ( int fi = numPrecalcVal; fi < numPrecalcIdx; fi++)
    {
        int threadID = getThreadNum();
        for( int si = 0; si < sample_count; si++ )
        {
            valCache.ptr<float>(threadID)[si] = (*featureEvaluator)( fi, si );
            if ( is_buf_16u )
                *(udst + fi*sample_count + si) = (unsigned short)si;
            else
                *(idst + fi*sample_count + si) = si;
        }
        if ( is_buf_16u )
            icvSortUShAux( udst + fi*sample_count, sample_count, valCache.ptr<float>(threadID) );
        else
            icvSortIntAux( idst + fi*sample_count, sample_count, valCache.ptr<float>(threadID) );
    }

#pragma omp parallel for num_threads(maxNumThreads) schedule(dynamic)
    for ( int fi = 0; fi < minNum; fi++)
    {
        for( int si = 0; si < sample_count; si++ )
        {
            valCache.ptr<float>(fi)[si] = (*featureEvaluator)( fi, si );
            if ( is_buf_16u )
                *(udst + fi*sample_count + si) = (unsigned short)si;
            else
                *(idst + fi*sample_count + si) = si;
        }
        // sort row fi by its own cached values; (float*)valCache.data would
        // always use row 0 as the sort keys
        if ( is_buf_16u )
            icvSortUShAux( udst + fi*sample_count, sample_count, valCache.ptr<float>(fi) );
        else
            icvSortIntAux( idst + fi*sample_count, sample_count, valCache.ptr<float>(fi) );
    }

#pragma omp parallel for num_threads(maxNumThreads) schedule(dynamic)
    for ( int fi = minNum; fi < numPrecalcVal; fi++)
        for( int si = 0; si < sample_count; si++ )
            valCache.ptr<float>(fi)[si] = (*featureEvaluator)( fi, si );

    cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl;
}
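// Buffer-size arithmetic behind numPrecalcVal/numPrecalcIdx, as a worked
// example: both budgets are given in megabytes (1048576 bytes). With, say, a
// 512 MB value buffer, 4-byte floats and 20000 samples,
//   numPrecalcVal = min( 512*1048576 / (4*20000), var_count ) = min( 6710, var_count )
// features can have their responses cached in valCache; features beyond
// numPrecalcVal are re-evaluated on demand in getVarValue().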
//-------------------------------- CascadeBoostTree ----------------------------------------

CvDTreeNode* CvCascadeBoostTree::predict( int sampleIdx ) const
{
    CvDTreeNode* node = root;
    if( !node )
        CV_Error( CV_StsError, "The tree has not been trained yet" );

    if ( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount() == 0 ) // ordered
    {
        while( node->left )
        {
            CvDTreeSplit* split = node->split;
            float val = ((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
            node = val <= split->ord.c ? node->left : node->right;
        }
    }
    else // categorical
    {
        while( node->left )
        {
            CvDTreeSplit* split = node->split;
            int c = (int)((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
            node = CV_DTREE_CAT_DIR(c, split->subset) < 0 ? node->left : node->right;
        }
    }
    return node;
}
void CvCascadeBoostTree::write( FileStorage &fs, const Mat& featureMap )
{
    int maxCatCount = ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount();
    int subsetN = (maxCatCount + 31)/32;
    queue<CvDTreeNode*> internalNodesQueue;
    int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth);
    vector<float> leafVals(size); // plain vector; an owning smart pointer over new[] would need delete[]
    int leafValIdx = 0;
    int internalNodeIdx = 1;
    CvDTreeNode* tempNode;

    CV_DbgAssert( root );
    internalNodesQueue.push( root );

    fs << "{";
    fs << CC_INTERNAL_NODES << "[:";
    while (!internalNodesQueue.empty())
    {
        tempNode = internalNodesQueue.front();
        CV_Assert( tempNode->left );
        if ( !tempNode->left->left && !tempNode->left->right) // left node is leaf
        {
            leafVals[-leafValIdx] = (float)tempNode->left->value;
            fs << leafValIdx--;
        }
        else
        {
            internalNodesQueue.push( tempNode->left );
            fs << internalNodeIdx++;
        }
        CV_Assert( tempNode->right );
        if ( !tempNode->right->left && !tempNode->right->right) // right node is leaf
        {
            leafVals[-leafValIdx] = (float)tempNode->right->value;
            fs << leafValIdx--;
        }
        else
        {
            internalNodesQueue.push( tempNode->right );
            fs << internalNodeIdx++;
        }
        int fidx = tempNode->split->var_idx;
        fidx = featureMap.empty() ? fidx : featureMap.at<int>(0, fidx);
        fs << fidx;
        if ( !maxCatCount )
            fs << tempNode->split->ord.c;
        else
            for( int i = 0; i < subsetN; i++ )
                fs << tempNode->split->subset[i];
        internalNodesQueue.pop();
    }
    fs << "]"; // CC_INTERNAL_NODES

    fs << CC_LEAF_VALUES << "[:";
    for (int ni = 0; ni < -leafValIdx; ni++)
        fs << leafVals[ni];
    fs << "]"; // CC_LEAF_VALUES
    fs << "}";
}
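// Serialization format produced above and consumed by read() below: internal
// nodes are written breadth-first as flat tuples
//   { leftChildIdx, rightChildIdx, featureIdx, threshold | subset[0..subsetN) }
// where a positive child index refers to another internal node and a
// non-positive one refers to leaf number -idx in the CC_LEAF_VALUES list; this
// is why read() uses step = 3 + (maxCatCount > 0 ? subsetN : 1) per node.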
void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
                               CvDTreeTrainData* _data )
{
    int maxCatCount = ((CvCascadeBoostTrainData*)_data)->featureEvaluator->getMaxCatCount();
    int subsetN = (maxCatCount + 31)/32;
    int step = 3 + ( maxCatCount>0 ? subsetN : 1 );

    queue<CvDTreeNode*> internalNodesQueue;
    FileNodeIterator internalNodesIt, leafValsuesIt;
    CvDTreeNode* prntNode, *cldNode;

    clear();
    data = _data;
    ensemble = _ensemble;
    pruned_tree_idx = 0;

    // read tree nodes in reverse order, reconstructing children before parents
    FileNode rnode = node[CC_INTERNAL_NODES];
    internalNodesIt = rnode.end();
    leafValsuesIt = node[CC_LEAF_VALUES].end();
    internalNodesIt--; leafValsuesIt--;
    for( size_t i = 0; i < rnode.size()/step; i++ )
    {
        prntNode = data->new_node( 0, 0, 0, 0 );
        if ( maxCatCount > 0 ) // categorical
        {
            prntNode->split = data->new_split_cat( 0, 0 );
            for( int j = subsetN-1; j>=0; j--)
            {
                *internalNodesIt >> prntNode->split->subset[j]; internalNodesIt--;
            }
        }
        else // ordered
        {
            float split_value;
            *internalNodesIt >> split_value; internalNodesIt--;
            prntNode->split = data->new_split_ord( 0, split_value, 0, 0, 0);
        }
        *internalNodesIt >> prntNode->split->var_idx; internalNodesIt--;
        int ridx, lidx;
        *internalNodesIt >> ridx; internalNodesIt--;
        *internalNodesIt >> lidx; internalNodesIt--;
        if ( ridx <= 0)
        {
            prntNode->right = cldNode = data->new_node( 0, 0, 0, 0 );
            *leafValsuesIt >> cldNode->value; leafValsuesIt--;
            cldNode->parent = prntNode;
        }
        else
        {
            prntNode->right = internalNodesQueue.front();
            prntNode->right->parent = prntNode;
            internalNodesQueue.pop();
        }

        if ( lidx <= 0)
        {
            prntNode->left = cldNode = data->new_node( 0, 0, 0, 0 );
            *leafValsuesIt >> cldNode->value; leafValsuesIt--;
            cldNode->parent = prntNode;
        }
        else
        {
            prntNode->left = internalNodesQueue.front();
            prntNode->left->parent = prntNode;
            internalNodesQueue.pop();
        }

        internalNodesQueue.push( prntNode );
    }

    root = internalNodesQueue.front();
    internalNodesQueue.pop();
}
void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
{
    int n = node->sample_count, nl, nr, scount = data->sample_count;
    char* dir = (char*)data->direction->data.ptr;
    CvDTreeNode *left = 0, *right = 0;
    int* newIdx = data->split_buf->data.i;
    int newBufIdx = data->get_child_buf_idx( node );
    int workVarCount = data->get_work_var_count();
    CvMat* buf = data->buf;
    int* tempBuf = (int*)cvStackAlloc(n*sizeof(tempBuf[0]));
    bool splitInputData;

    complete_node_dir(node);

    for( int i = nl = nr = 0; i < n; i++ )
    {
        int d = dir[i];
        // initialize new indices for splitting ordered variables
        newIdx[i] = (nl & (d-1)) | (nr & -d); // d ? ri : li
        nr += d;
        nl += d^1;
    }

    node->left = left = data->new_node( node, nl, newBufIdx, node->offset );
    node->right = right = data->new_node( node, nr, newBufIdx, node->offset + nl );

    splitInputData = node->depth + 1 < data->params.max_depth &&
        (node->left->sample_count > data->params.min_sample_count ||
         node->right->sample_count > data->params.min_sample_count);

    // split ordered variables, keep both halves sorted.
    for( int vi = 0; vi < ((CvCascadeBoostTrainData*)data)->numPrecalcIdx; vi++ )
    {
        int ci = data->get_var_type(vi);
        int n1 = node->get_num_valid(vi);
        int *src_idx_buf = data->get_pred_int_buf();
        const int* src_idx = 0;
        float *src_val_buf = data->get_pred_float_buf();
        const float* src_val = 0;

        if( ci >= 0 || !splitInputData ) // categorical variable or no deeper splits needed
            continue;

        data->get_ord_var_data(node, vi, src_val_buf, src_idx_buf, &src_val, &src_idx);

        for(int i = 0; i < n; i++)
            tempBuf[i] = src_idx[i];

        if (data->is_buf_16u)
        {
            unsigned short *ldst, *rdst, *ldst0, *rdst0;
            ldst0 = ldst = (unsigned short*)(buf->data.s + left->buf_idx*buf->cols +
                vi*scount + left->offset);
            rdst0 = rdst = (unsigned short*)(ldst + nl);

            // distribute the sorted indices, relocating them through newIdx
            for( int i = 0; i < n1; i++ )
            {
                int idx = tempBuf[i];
                int d = dir[idx];
                idx = newIdx[idx];
                if (d)
                    *rdst++ = (unsigned short)idx;
                else
                    *ldst++ = (unsigned short)idx;
            }

            left->set_num_valid(vi, (int)(ldst - ldst0));
            right->set_num_valid(vi, (int)(rdst - rdst0));
        }
        else
        {
            int *ldst0, *ldst, *rdst0, *rdst;
            ldst0 = ldst = buf->data.i + left->buf_idx*buf->cols +
                vi*scount + left->offset;
            rdst0 = rdst = buf->data.i + right->buf_idx*buf->cols +
                vi*scount + right->offset;

            // distribute the sorted indices, relocating them through newIdx
            for( int i = 0; i < n1; i++ )
            {
                int idx = tempBuf[i];
                int d = dir[idx];
                idx = newIdx[idx];
                if (d)
                    *rdst++ = idx;
                else
                    *ldst++ = idx;
            }

            left->set_num_valid(vi, (int)(ldst - ldst0));
            right->set_num_valid(vi, (int)(rdst - rdst0));
        }
    }

    // split cv_labels using newIdx relocation table
    int *src_lbls_buf = data->get_pred_int_buf();
    const int* src_lbls = 0;
    data->get_cv_labels(node, src_lbls_buf, &src_lbls);

    for(int i = 0; i < n; i++)
        tempBuf[i] = src_lbls[i];

    if (data->is_buf_16u)
    {
        unsigned short *ldst = (unsigned short *)(buf->data.s + left->buf_idx*buf->cols +
            (workVarCount-1)*scount + left->offset);
        unsigned short *rdst = (unsigned short *)(buf->data.s + right->buf_idx*buf->cols +
            (workVarCount-1)*scount + right->offset);

        for( int i = 0; i < n; i++ )
        {
            int idx = tempBuf[i];
            if (dir[i])
                *rdst++ = (unsigned short)idx;
            else
                *ldst++ = (unsigned short)idx;
        }
    }
    else
    {
        int *ldst = buf->data.i + left->buf_idx*buf->cols +
            (workVarCount-1)*scount + left->offset;
        int *rdst = buf->data.i + right->buf_idx*buf->cols +
            (workVarCount-1)*scount + right->offset;

        for( int i = 0; i < n; i++ )
        {
            int idx = tempBuf[i];
            if (dir[i])
                *rdst++ = idx;
            else
                *ldst++ = idx;
        }
    }
    for( int vi = 0; vi < data->var_count; vi++ )
    {
        left->set_num_valid(vi, (int)(nl));
        right->set_num_valid(vi, (int)(nr));
    }

    // split sample indices
    int *sampleIdx_src_buf = data->get_sample_idx_buf();
    const int* sampleIdx_src = 0;
    data->get_sample_indices(node, sampleIdx_src_buf, &sampleIdx_src);

    for(int i = 0; i < n; i++)
        tempBuf[i] = sampleIdx_src[i];

    if (data->is_buf_16u)
    {
        unsigned short* ldst = (unsigned short*)(buf->data.s + left->buf_idx*buf->cols +
            workVarCount*scount + left->offset);
        unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*buf->cols +
            workVarCount*scount + right->offset);
        for (int i = 0; i < n; i++)
        {
            unsigned short idx = (unsigned short)tempBuf[i];
            if (dir[i])
                *rdst++ = idx;
            else
                *ldst++ = idx;
        }
    }
    else
    {
        int* ldst = buf->data.i + left->buf_idx*buf->cols +
            workVarCount*scount + left->offset;
        int* rdst = buf->data.i + right->buf_idx*buf->cols +
            workVarCount*scount + right->offset;
        for (int i = 0; i < n; i++)
        {
            int idx = tempBuf[i];
            if (dir[i])
                *rdst++ = idx;
            else
                *ldst++ = idx;
        }
    }

    // deallocate the parent node data that is not needed anymore
    data->free_node_data(node);
}
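// The relocation table filled at the top of split_node_data() uses a
// branch-free select: with d in {0,1}, (d-1) is all-ones when d==0 and -d is
// all-ones when d==1, so
//   newIdx[i] = (nl & (d-1)) | (nr & -d)
// picks the running left count for left-going samples and the running right
// count for right-going ones. Example: nl=3, nr=5, d=1 -> (3 & 0) | (5 & ~0) = 5.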
void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap)
{
    if ( node && node->split )
    {
        featureMap.ptr<int>(0)[node->split->var_idx] = 1;
        auxMarkFeaturesInMap( node->left, featureMap );
        auxMarkFeaturesInMap( node->right, featureMap );
    }
}

void CvCascadeBoostTree::markFeaturesInMap( Mat& featureMap )
{
    auxMarkFeaturesInMap( root, featureMap );
}
//----------------------------------- CascadeBoost --------------------------------------

bool CvCascadeBoost::train( const CvFeatureEvaluator* _featureEvaluator,
                            int _numSamples,
                            int _precalcValBufSize, int _precalcIdxBufSize,
                            const CvCascadeBoostParams& _params )
{
    data = new CvCascadeBoostTrainData( _featureEvaluator, _numSamples,
                                        _precalcValBufSize, _precalcIdxBufSize, _params );
    CvMemStorage *storage = cvCreateMemStorage();
    weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
    storage = 0;

    set_params( _params );
    if ( (_params.boost_type == LOGIT) || (_params.boost_type == GENTLE) )
        data->do_responses_copy();

    update_weights( 0 );

    cout << "+----+---------+---------+" << endl;
    cout << "|  N |    HR   |    FA   |" << endl;
    cout << "+----+---------+---------+" << endl;

    do
    {
        CvCascadeBoostTree* tree = new CvCascadeBoostTree;
        if( !tree->train( data, subsample_mask, this ) )
        {
            // TODO: may be should finish the loop (!!!)
            delete tree;
            continue;
        }
        cvSeqPush( weak, &tree );
        update_weights( tree );
        trim_weights();
    }
    while( !isErrDesired() && (weak->total < params.weak_count) );

    data->is_classifier = true;
    data->free_train_data();
    return true;
}
float CvCascadeBoost::predict( int sampleIdx, bool returnSum ) const
{
    CV_Assert( weak );
    double sum = 0;
    CvSeqReader reader;
    cvStartReadSeq( weak, &reader );
    cvSetSeqReaderPos( &reader, 0 );
    for( int i = 0; i < weak->total; i++ )
    {
        CvBoostTree* wtree;
        CV_READ_SEQ_ELEM( wtree, reader );
        sum += ((CvCascadeBoostTree*)wtree)->predict(sampleIdx)->value;
    }
    if( !returnSum )
        sum = sum < threshold - CV_THRESHOLD_EPS ? 0.0 : 1.0;
    return (float)sum;
}
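// Stage decision illustrated with made-up numbers: the stage response is the
// sum of the leaf values of all weak trees for the sample; with
// returnSum == false that sum is compared against the stage threshold chosen
// in isErrDesired(), with a small epsilon so samples sitting exactly on the
// threshold still pass, e.g.
//   leaf values { +0.9, -0.2, +0.4 } -> sum = 1.1; threshold = 0.8 -> predict() = 1.0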
bool CvCascadeBoost::set_params( const CvBoostParams& _params )
{
    minHitRate = ((CvCascadeBoostParams&)_params).minHitRate;
    maxFalseAlarm = ((CvCascadeBoostParams&)_params).maxFalseAlarm;
    return ( ( minHitRate > 0 ) && ( minHitRate < 1 ) &&
             ( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1 ) &&
             CvBoost::set_params( _params ) );
}
void CvCascadeBoost::update_weights( CvBoostTree* tree )
{
    int n = data->sample_count;
    double sumW = 0.;
    int step = 0;
    float* fdata = 0;
    int *sampleIdxBuf;
    const int* sampleIdx = 0;
    if ( (params.boost_type == LOGIT) || (params.boost_type == GENTLE) )
    {
        step = CV_IS_MAT_CONT(data->responses_copy->type) ?
            1 : data->responses_copy->step / CV_ELEM_SIZE(data->responses_copy->type);
        fdata = data->responses_copy->data.fl;
        sampleIdxBuf = (int*)cvStackAlloc(data->sample_count*sizeof(sampleIdxBuf[0]));
        data->get_sample_indices( data->data_root, sampleIdxBuf, &sampleIdx );
    }
    CvMat* buf = data->buf;
    if( !tree ) // before training the first tree, initialize weights and other parameters
    {
        int* classLabelsBuf = data->get_resp_int_buf();
        const int* classLabels = 0;
        data->get_class_labels(data->data_root, classLabelsBuf, &classLabels);
        // in case of logitboost and gentle adaboost each weak tree is a regression tree,
        // so we need to convert class labels to floating-point values
        float* responses_buf = data->get_resp_float_buf();
        const float* responses = 0;
        data->get_ord_responses(data->data_root, responses_buf, &responses);

        double w0 = 1./n;
        double p[2] = { 1, 1 };

        cvReleaseMat( &orig_response );
        cvReleaseMat( &sum_response );
        cvReleaseMat( &weak_eval );
        cvReleaseMat( &subsample_mask );
        cvReleaseMat( &weights );

        orig_response = cvCreateMat( 1, n, CV_32S );
        weak_eval = cvCreateMat( 1, n, CV_64F );
        subsample_mask = cvCreateMat( 1, n, CV_8U );
        weights = cvCreateMat( 1, n, CV_64F );
        subtree_weights = cvCreateMat( 1, n + 2, CV_64F );

        if (data->is_buf_16u)
        {
            unsigned short* labels = (unsigned short*)(buf->data.s + data->data_root->buf_idx*buf->cols +
                data->data_root->offset + (data->work_var_count-1)*data->sample_count);
            for( int i = 0; i < n; i++ )
            {
                // save original categorical responses {0,1}, convert them to {-1,1}
                orig_response->data.i[i] = classLabels[i]*2 - 1;
                // make all the samples active at start.
                // later, in trim_weights() deactivate/reactivate again some, if needed
                subsample_mask->data.ptr[i] = (uchar)1;
                // make all the initial weights the same.
                weights->data.db[i] = w0*p[classLabels[i]];
                // set the labels to find (from within weak tree learning proc)
                // the particular sample weight, and where to store the response.
                labels[i] = (unsigned short)i;
            }
        }
        else
        {
            int* labels = buf->data.i + data->data_root->buf_idx*buf->cols +
                data->data_root->offset + (data->work_var_count-1)*data->sample_count;

            for( int i = 0; i < n; i++ )
            {
                // save original categorical responses {0,1}, convert them to {-1,1}
                orig_response->data.i[i] = classLabels[i]*2 - 1;
                subsample_mask->data.ptr[i] = (uchar)1;
                weights->data.db[i] = w0*p[classLabels[i]];
                labels[i] = i;
            }
        }

        if( params.boost_type == LOGIT )
        {
            sum_response = cvCreateMat( 1, n, CV_64F );

            for( int i = 0; i < n; i++ )
            {
                sum_response->data.db[i] = 0;
                fdata[sampleIdx[i]*step] = orig_response->data.i[i] > 0 ? 2.f : -2.f;
            }

            // in case of logitboost each weak tree is a regression tree.
            // the target function values are recalculated for each of the trees
            data->is_classifier = false;
        }
        else if( params.boost_type == GENTLE )
        {
            for( int i = 0; i < n; i++ )
                fdata[sampleIdx[i]*step] = (float)orig_response->data.i[i];

            data->is_classifier = false;
        }
    }
    else
    {
        // at this moment, for all the samples that participated in the training of the most
        // recent weak classifier we know the responses. For other samples we need to compute them
        if( have_subsample )
        {
            // invert the subsample mask
            cvXorS( subsample_mask, cvScalar(1.), subsample_mask );

            // run tree through all the non-processed samples
            for( int i = 0; i < n; i++ )
                if( subsample_mask->data.ptr[i] )
                {
                    weak_eval->data.db[i] = ((CvCascadeBoostTree*)tree)->predict( i )->value;
                }
        }

        // now update weights and other parameters for each type of boosting
        if( params.boost_type == DISCRETE )
        {
            // Discrete AdaBoost:
            //   weak_eval[i] (=f(x_i)) is in {-1,1}
            //   err = sum(w_i*(f(x_i) != y_i))/sum(w_i)
            //   C = log((1-err)/err)
            //   w_i *= exp(C*(f(x_i) != y_i))
            double C, err = 0.;
            double scale[] = { 1., 0. };

            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i];
                sumW += w;
                err += w*(weak_eval->data.db[i] != orig_response->data.i[i]);
            }

            if( sumW != 0 )
                err /= sumW;
            C = err = -logRatio( err );
            scale[1] = exp(err);

            sumW = 0;
            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i]*
                    scale[weak_eval->data.db[i] != orig_response->data.i[i]];
                sumW += w;
                weights->data.db[i] = w;
            }

            tree->scale( C );
        }
        else if( params.boost_type == REAL )
        {
            // Real AdaBoost:
            //   weak_eval[i] = f(x_i) = 0.5*log(p(x_i)/(1-p(x_i))), p(x_i)=P(y=1|x_i)
            //   w_i *= exp(-y_i*f(x_i))
            for( int i = 0; i < n; i++ )
                weak_eval->data.db[i] *= -orig_response->data.i[i];

            cvExp( weak_eval, weak_eval );

            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i]*weak_eval->data.db[i];
                sumW += w;
                weights->data.db[i] = w;
            }
        }
        else if( params.boost_type == LOGIT )
        {
            // LogitBoost:
            //   weak_eval[i] = f(x_i) in [-z_max,z_max]
            //   sum_response = F(x_i).
            //   F(x_i) += 0.5*f(x_i)
            //   p(x_i) = exp(F(x_i))/(exp(F(x_i)) + exp(-F(x_i))) = 1/(1+exp(-2*F(x_i)))
            //   reuse weak_eval: weak_eval[i] <- p(x_i)
            //   w_i = p(x_i)*(1 - p(x_i))
            //   z_i = ((y_i+1)/2 - p(x_i))/(p(x_i)*(1 - p(x_i)))
            //   store z_i to the data->data_root as the new target responses
            const double lbWeightThresh = FLT_EPSILON;
            const double lbZMax = 10.;
            float* responsesBuf = data->get_resp_float_buf();
            const float* responses = 0;
            data->get_ord_responses(data->data_root, responsesBuf, &responses);

            for( int i = 0; i < n; i++ )
            {
                double s = sum_response->data.db[i] + 0.5*weak_eval->data.db[i];
                sum_response->data.db[i] = s;
                weak_eval->data.db[i] = -2*s;
            }

            cvExp( weak_eval, weak_eval );

            for( int i = 0; i < n; i++ )
            {
                double p = 1./(1. + weak_eval->data.db[i]);
                double w = p*(1 - p), z;
                w = MAX( w, lbWeightThresh );
                weights->data.db[i] = w;
                sumW += w;
                if( orig_response->data.i[i] > 0 )
                {
                    z = 1./p;
                    fdata[sampleIdx[i]*step] = (float)min(z, lbZMax);
                }
                else
                {
                    z = 1./(1-p);
                    fdata[sampleIdx[i]*step] = (float)-min(z, lbZMax);
                }
            }
        }
        else
        {
            // Gentle AdaBoost:
            //   weak_eval[i] = f(x_i) in [-1,1]
            //   w_i *= exp(-y_i*f(x_i))
            assert( params.boost_type == GENTLE );

            for( int i = 0; i < n; i++ )
                weak_eval->data.db[i] *= -orig_response->data.i[i];

            cvExp( weak_eval, weak_eval );

            for( int i = 0; i < n; i++ )
            {
                double w = weights->data.db[i] * weak_eval->data.db[i];
                weights->data.db[i] = w;
                sumW += w;
            }
        }
    }

    // renormalize weights
    if( sumW > FLT_EPSILON )
    {
        sumW = 1./sumW;
        for( int i = 0; i < n; ++i )
            weights->data.db[i] *= sumW;
    }
}
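// Worked example of the Gentle AdaBoost branch above (the default here):
// suppose a sample has label y_i = +1, current weight w_i = 0.01, and the new
// regression tree outputs f(x_i) = 0.6. Then
//   w_i <- w_i * exp( -y_i * f(x_i) ) = 0.01 * exp(-0.6) ~ 0.00549
// so correctly-scored samples shrink and mis-scored ones grow, and the final
// renormalization rescales all weights so they again sum to 1.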
bool CvCascadeBoost::isErrDesired()
{
    int sCount = data->sample_count,
        numPos = 0, numNeg = 0, numFalse = 0, numPosTrue = 0;
    float* eval = (float*) cvStackAlloc( sizeof(eval[0]) * sCount );

    for( int i = 0; i < sCount; i++ )
        if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 1.0F )
            eval[numPos++] = predict( i, true );
    icvSortFlt( eval, numPos, 0 );
    int thresholdIdx = (int)((1.0F - minHitRate) * numPos);
    threshold = eval[ thresholdIdx ];
    numPosTrue = numPos - thresholdIdx;
    for( int i = thresholdIdx - 1; i >= 0; i--)
        if ( abs( eval[i] - threshold) < FLT_EPSILON )
            numPosTrue++;
    float hitRate = ((float) numPosTrue) / ((float) numPos);

    for( int i = 0; i < sCount; i++ )
    {
        if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 0.0F )
        {
            numNeg++;
            if( predict( i ) )
                numFalse++;
        }
    }
    float falseAlarm = ((float) numFalse) / ((float) numNeg);

    cout << "|"; cout.width(4); cout << right << weak->total;
    cout << "|"; cout.width(9); cout << right << hitRate;
    cout << "|"; cout.width(9); cout << right << falseAlarm;
    cout << "|" << endl;
    cout << "+----+---------+---------+" << endl;

    return falseAlarm <= maxFalseAlarm;
}
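// Worked example of the threshold search above: with numPos = 1000 positives
// and minHitRate = 0.995, thresholdIdx = (int)((1 - 0.995)*1000) = 5, i.e. the
// stage threshold is set to the 6th smallest positive response, sacrificing at
// most 5 positives; numPosTrue = 995 (plus any ties just below the threshold),
// giving hitRate >= 0.995. falseAlarm is then measured on the negatives at
// that same threshold, and the stage keeps adding weak trees until
// falseAlarm <= maxFalseAlarm.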
void CvCascadeBoost::write( FileStorage &fs, const Mat& featureMap ) const
{
    CvCascadeBoostTree* weakTree;
    fs << CC_WEAK_COUNT << weak->total;
    fs << CC_STAGE_THRESHOLD << threshold;
    fs << CC_WEAK_CLASSIFIERS << "[";
    for( int wi = 0; wi < weak->total; wi++)
    {
        /*sprintf( cmnt, "tree %i", wi );
        cvWriteComment( fs, cmnt, 0 );*/
        weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
        weakTree->write( fs, featureMap );
    }
    fs << "]";
}
bool CvCascadeBoost::read( const FileNode &node,
                           const CvFeatureEvaluator* _featureEvaluator,
                           const CvCascadeBoostParams& _params )
{
    CvMemStorage* storage;
    clear();
    data = new CvCascadeBoostTrainData( _featureEvaluator, _params );
    set_params( _params );

    node[CC_STAGE_THRESHOLD] >> threshold;
    FileNode rnode = node[CC_WEAK_CLASSIFIERS];

    storage = cvCreateMemStorage();
    weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
    for( FileNodeIterator it = rnode.begin(); it != rnode.end(); it++ )
    {
        CvCascadeBoostTree* tree = new CvCascadeBoostTree();
        tree->read( *it, this, data );
        cvSeqPush( weak, &tree );
    }
    return true;
}
void CvCascadeBoost::markUsedFeaturesInMap( Mat& featureMap )
{
    for( int wi = 0; wi < weak->total; wi++ )
    {
        CvCascadeBoostTree* weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
        weakTree->markFeaturesInMap( featureMap );
    }
}