#include "opencv2/core/core_c.h"
#include "opencv2/ml/ml.hpp"

#include <cstdio>
#include <cstring>
#include <cfloat>
#include <cmath>
/*
The sample demonstrates how to train a Random Trees classifier
(or a Boosting classifier, or an MLP - see main()) using the provided dataset.

We use the sample database letter-recognition.data
from the UCI Repository; here is the reference:

Newman, D.J. & Hettich, S. & Blake, C.L. & Merz, C.J. (1998).
UCI Repository of machine learning databases
[http://www.ics.uci.edu/~mlearn/MLRepository.html].
Irvine, CA: University of California, Department of Information and Computer Science.

The dataset consists of 20000 feature vectors along with the
responses - capital Latin letters A..Z.
The first 16000 samples (10000 for boosting) are used for training
and the remaining 4000 (10000 for boosting) are used to test the classifier.
*/
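/*
For reference, each record in letter-recognition.data is a single
comma-separated line: the class letter followed by 16 integer attributes.
An illustrative record (exact values vary from sample to sample) looks like:

    T,2,8,3,5,1,8,13,0,6,6,10,8,0,8,0,8
*/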
// This function reads data and responses from the file <filename>
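// On success, *data is an N x var_count CV_32F matrix of features and
// *responses is an N x 1 CV_32F matrix that stores each sample's class
// letter as the ASCII code of the character (e.g. 'A' -> 65.f).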
read_num_class_data( const char* filename, int var_count,
                     CvMat** data, CvMat** responses )
    FILE* f = fopen( filename, "rt" );
    CvMemStorage* storage;

    el_ptr = new float[var_count+1];
    storage = cvCreateMemStorage();
    seq = cvCreateSeq( 0, sizeof(*seq), (var_count+1)*sizeof(float), storage );

        if( !fgets( buf, M, f ) || !strchr( buf, ',' ) )
            break;
        for( i = 1; i <= var_count; i++ )
            sscanf( ptr, "%f%n", el_ptr + i, &n );
        cvSeqPush( seq, el_ptr );

    *data = cvCreateMat( seq->total, var_count, CV_32F );
    *responses = cvCreateMat( seq->total, 1, CV_32F );

    cvStartReadSeq( seq, &reader );

    for( i = 0; i < seq->total; i++ )
    {
        const float* sdata = (float*)reader.ptr + 1;
        float* ddata = data[0]->data.fl + var_count*i;
        float* dr = responses[0]->data.fl + i;

        for( j = 0; j < var_count; j++ )
            ddata[j] = sdata[j];
        *dr = sdata[-1];
        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
    }

    cvReleaseMemStorage( &storage );
int build_rtrees_classifier( char* data_filename,
    char* filename_to_save, char* filename_to_load )
    CvMat* sample_idx = 0;

    int ok = read_num_class_data( data_filename, 16, &data, &responses );
    int nsamples_all = 0, ntrain_samples = 0;
    double train_hr = 0, test_hr = 0;
    CvMat* var_importance = 0;

        printf( "Could not read the database %s\n", data_filename );

    printf( "The database %s is loaded.\n", data_filename );
    nsamples_all = data->rows;
    ntrain_samples = (int)(nsamples_all*0.8);
    // Create or load Random Trees classifier
    if( filename_to_load )
        // load classifier from the specified file
        forest.load( filename_to_load );

    if( forest.get_tree_count() == 0 )
        printf( "Could not read the classifier %s\n", filename_to_load );

    printf( "The classifier %s is loaded.\n", filename_to_load );

    // create classifier by using <data> and <responses>
    printf( "Training the classifier ...\n");

    // 1. create type mask
    var_type = cvCreateMat( data->cols + 1, 1, CV_8U );
    cvSet( var_type, cvScalarAll(CV_VAR_ORDERED) );
    cvSetReal1D( var_type, data->cols, CV_VAR_CATEGORICAL );

    // 2. create sample_idx
    sample_idx = cvCreateMat( 1, nsamples_all, CV_8UC1 );
    cvGetCols( sample_idx, &mat, 0, ntrain_samples );
    cvSet( &mat, cvRealScalar(1) );

    cvGetCols( sample_idx, &mat, ntrain_samples, nsamples_all );
    cvSetZero( &mat );

    // 3. train classifier
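    // CvRTParams arguments below (in order): max_depth=10, min_sample_count=10,
    // regression_accuracy=0, use_surrogates=false, max_categories=15, priors=0,
    // calc_var_importance=true (needed for get_var_importance() below),
    // nactive_vars=4 (~sqrt(16) features tried per split), at most 100 trees,
    // forest_accuracy=0.01, and iteration-count-based termination.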
    forest.train( data, CV_ROW_SAMPLE, responses, 0, sample_idx, var_type, 0,
        CvRTParams(10,10,0,false,15,0,true,4,100,0.01f,CV_TERMCRIT_ITER));
    // compute prediction error on train and test data
    for( i = 0; i < nsamples_all; i++ )
    {
        cvGetRow( data, &sample, i );

        r = forest.predict( &sample );
        r = fabs((double)r - responses->data.fl[i]) <= FLT_EPSILON ? 1 : 0;

        if( i < ntrain_samples )
            train_hr += r;
        else
            test_hr += r;
    }

    test_hr /= (double)(nsamples_all-ntrain_samples);
    train_hr /= (double)ntrain_samples;
    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
            train_hr*100., test_hr*100. );

    printf( "Number of trees: %d\n", forest.get_tree_count() );
    // Print variable importance
    var_importance = (CvMat*)forest.get_var_importance();

    double rt_imp_sum = cvSum( var_importance ).val[0];
    printf("var#\timportance (in %%):\n");
    for( i = 0; i < var_importance->cols; i++ )
        printf( "%-2d\t%-4.1f\n", i,
            100.f*var_importance->data.fl[i]/rt_imp_sum);
    // Print some proximities
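    // (The proximity of two samples is the fraction of trees in the forest
    // in which the two samples end up in the same leaf node, so 100% means
    // every tree routes both samples to the same leaf.)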
    printf( "Proximities between some samples corresponding to the letter 'T':\n" );

    CvMat sample1, sample2;
    const int pairs[][2] = {{0,103}, {0,106}, {106,103}, {-1,-1}};

    for( i = 0; pairs[i][0] >= 0; i++ )
    {
        cvGetRow( data, &sample1, pairs[i][0] );
        cvGetRow( data, &sample2, pairs[i][1] );
        printf( "proximity(%d,%d) = %.1f%%\n", pairs[i][0], pairs[i][1],
            forest.get_proximity( &sample1, &sample2 )*100. );
    }

    // Save Random Trees classifier to file if needed
    if( filename_to_save )
        forest.save( filename_to_save );

    cvReleaseMat( &sample_idx );
    cvReleaseMat( &var_type );
    cvReleaseMat( &data );
    cvReleaseMat( &responses );
int build_boost_classifier( char* data_filename,
    char* filename_to_save, char* filename_to_load )
    const int class_count = 26;

    CvMat* responses = 0;

    CvMat* temp_sample = 0;
    CvMat* weak_responses = 0;

    int ok = read_num_class_data( data_filename, 16, &data, &responses );
    int nsamples_all = 0, ntrain_samples = 0;

    double train_hr = 0, test_hr = 0;

        printf( "Could not read the database %s\n", data_filename );

    printf( "The database %s is loaded.\n", data_filename );
    nsamples_all = data->rows;
    ntrain_samples = (int)(nsamples_all*0.5);
    var_count = data->cols;

    // Create or load Boosted Tree classifier
    if( filename_to_load )
        // load classifier from the specified file
        boost.load( filename_to_load );

    if( !boost.get_weak_predictors() )
        printf( "Could not read the classifier %s\n", filename_to_load );

    printf( "The classifier %s is loaded.\n", filename_to_load );
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //
    // As the boosted tree classifier in MLL can currently only be trained
    // for 2-class problems, we transform the training database by
    // "unrolling" each training sample as many times as the number of
    // classes (26) that we have.
    //
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
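    // For example, a training row with features x and class label 'C' becomes
    // 26 rows (x, 0), (x, 1), ..., (x, 25); the new binary response is 1 only
    // for the row whose appended class index j equals 'C' - 'A' = 2.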
    CvMat* new_data = cvCreateMat( ntrain_samples*class_count, var_count + 1, CV_32F );
    CvMat* new_responses = cvCreateMat( ntrain_samples*class_count, 1, CV_32S );

    // 1. unroll the database
    printf( "Unrolling the database...\n");
    for( i = 0; i < ntrain_samples; i++ )
    {
        float* data_row = (float*)(data->data.ptr + data->step*i);
        for( j = 0; j < class_count; j++ )
        {
            float* new_data_row = (float*)(new_data->data.ptr +
                new_data->step*(i*class_count+j));
            for( k = 0; k < var_count; k++ )
                new_data_row[k] = data_row[k];
            new_data_row[var_count] = (float)j;
            new_responses->data.i[i*class_count + j] = responses->data.fl[i] == j+'A';
        }
    }

    // 2. create type mask
    var_type = cvCreateMat( var_count + 2, 1, CV_8U );
    cvSet( var_type, cvScalarAll(CV_VAR_ORDERED) );
    // the last indicator variable, as well
    // as the new (binary) response are categorical
    cvSetReal1D( var_type, var_count, CV_VAR_CATEGORICAL );
    cvSetReal1D( var_type, var_count+1, CV_VAR_CATEGORICAL );

    // 3. train classifier
    printf( "Training the classifier (may take a few minutes)...\n");
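    // CvBoostParams arguments below (in order): boost_type=CvBoost::REAL
    // (Real AdaBoost), weak_count=100 weak trees, weight_trim_rate=0.95,
    // max_depth=5 per weak tree, use_surrogates=false, priors=0.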
    boost.train( new_data, CV_ROW_SAMPLE, new_responses, 0, 0, var_type, 0,
        CvBoostParams(CvBoost::REAL, 100, 0.95, 5, false, 0 ));
    cvReleaseMat( &new_data );
    cvReleaseMat( &new_responses );

    temp_sample = cvCreateMat( 1, var_count + 1, CV_32F );
    weak_responses = cvCreateMat( 1, boost.get_weak_predictors()->total, CV_32F );
    // compute prediction error on train and test data
    for( i = 0; i < nsamples_all; i++ )
    {
        double max_sum = -DBL_MAX;

        cvGetRow( data, &sample, i );
        for( k = 0; k < var_count; k++ )
            temp_sample->data.fl[k] = sample.data.fl[k];

        for( j = 0; j < class_count; j++ )
        {
            temp_sample->data.fl[var_count] = (float)j;
            boost.predict( temp_sample, 0, weak_responses );
            double sum = cvSum( weak_responses ).val[0];
            if( max_sum < sum )
            {
                max_sum = sum;
                best_class = j + 'A';
            }
        }

        r = fabs(best_class - responses->data.fl[i]) < FLT_EPSILON ? 1 : 0;

        if( i < ntrain_samples )
            train_hr += r;
        else
            test_hr += r;
    }

    test_hr /= (double)(nsamples_all-ntrain_samples);
    train_hr /= (double)ntrain_samples;
    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
            train_hr*100., test_hr*100. );
    printf( "Number of trees: %d\n", boost.get_weak_predictors()->total );

    // Save classifier to file if needed
    if( filename_to_save )
        boost.save( filename_to_save );

    cvReleaseMat( &temp_sample );
    cvReleaseMat( &weak_responses );
    cvReleaseMat( &var_type );
    cvReleaseMat( &data );
    cvReleaseMat( &responses );
int build_mlp_classifier( char* data_filename,
    char* filename_to_save, char* filename_to_load )
    const int class_count = 26;

    CvMat* responses = 0;
    CvMat* mlp_response = 0;

    int ok = read_num_class_data( data_filename, 16, &data, &responses );
    int nsamples_all = 0, ntrain_samples = 0;

    double train_hr = 0, test_hr = 0;

        printf( "Could not read the database %s\n", data_filename );

    printf( "The database %s is loaded.\n", data_filename );
    nsamples_all = data->rows;
    ntrain_samples = (int)(nsamples_all*0.8);

    // Create or load MLP classifier
    if( filename_to_load )
        // load classifier from the specified file
        mlp.load( filename_to_load );

    if( !mlp.get_layer_count() )
        printf( "Could not read the classifier %s\n", filename_to_load );

    printf( "The classifier %s is loaded.\n", filename_to_load );
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    //
    // MLP does not support categorical variables explicitly.
    // So, instead of the output class label, we will use
    // a binary vector of <class_count> components for training and,
    // therefore, MLP will give us a vector of "probabilities" at the
    // output layer.
    //
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
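    // For example, a sample labelled 'C' gets the 26-element target vector
    // (0, 0, 1, 0, ..., 0): all zeros except a 1 at index 'C' - 'A' = 2.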
    CvMat* new_responses = cvCreateMat( ntrain_samples, class_count, CV_32F );

    // 1. unroll the responses
    printf( "Unrolling the responses...\n");
    for( i = 0; i < ntrain_samples; i++ )
    {
        int cls_label = cvRound(responses->data.fl[i]) - 'A';
        float* bit_vec = (float*)(new_responses->data.ptr + i*new_responses->step);
        for( j = 0; j < class_count; j++ )
            bit_vec[j] = 0.f;
        bit_vec[cls_label] = 1.f;
    }
    cvGetRows( data, &train_data, 0, ntrain_samples );

    // 2. train classifier
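    // Network topology: 16 input neurons (one per feature), two hidden layers
    // of 100 neurons each, and 26 output neurons (one per letter class).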
    int layer_sz[] = { data->cols, 100, 100, class_count };
    CvMat layer_sizes =
        cvMat( 1, (int)(sizeof(layer_sz)/sizeof(layer_sz[0])), CV_32S, layer_sz );
    mlp.create( &layer_sizes );
    printf( "Training the classifier (may take a few minutes)...\n");
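    // CvANN_MLP_TrainParams takes the termination criteria (here at most 300
    // iterations), the training method, and that method's primary parameter:
    // for BACKPROP it is the gradient step size, for RPROP the initial update value.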
    mlp.train( &train_data, new_responses, 0, 0,
        CvANN_MLP_TrainParams(cvTermCriteria(CV_TERMCRIT_ITER,300,0.01),
#if 1
        CvANN_MLP_TrainParams::BACKPROP,0.001));
#else
        CvANN_MLP_TrainParams::RPROP,0.05));
#endif
    cvReleaseMat( &new_responses );

    mlp_response = cvCreateMat( 1, class_count, CV_32F );

    // compute prediction error on train and test data
    for( i = 0; i < nsamples_all; i++ )
    {
        cvGetRow( data, &sample, i );
        CvPoint max_loc = {0,0};
        mlp.predict( &sample, mlp_response );
        cvMinMaxLoc( mlp_response, 0, 0, 0, &max_loc, 0 );
        best_class = max_loc.x + 'A';

        int r = fabs((double)best_class - responses->data.fl[i]) < FLT_EPSILON ? 1 : 0;
        if( i < ntrain_samples )
            train_hr += r;
        else
            test_hr += r;
    }

    test_hr /= (double)(nsamples_all-ntrain_samples);
    train_hr /= (double)ntrain_samples;
    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
            train_hr*100., test_hr*100. );
    // Save classifier to file if needed
    if( filename_to_save )
        mlp.save( filename_to_save );

    cvReleaseMat( &mlp_response );
    cvReleaseMat( &data );
    cvReleaseMat( &responses );
int main( int argc, char *argv[] )
    char* filename_to_save = 0;
    char* filename_to_load = 0;
    char default_data_filename[] = "./letter-recognition.data";
    char* data_filename = default_data_filename;
    for( i = 1; i < argc; i++ )
    {
        if( strcmp(argv[i],"-data") == 0 ) // flag "-data <path to letter-recognition.data>"
        {
            i++;
            data_filename = argv[i];
        }
        else if( strcmp(argv[i],"-save") == 0 ) // flag "-save <filename.xml>"
        {
            i++;
            filename_to_save = argv[i];
        }
        else if( strcmp(argv[i],"-load") == 0) // flag "-load <filename.xml>"
        {
            i++;
            filename_to_load = argv[i];
        }
        else if( strcmp(argv[i],"-boost") == 0)

        else if( strcmp(argv[i],"-mlp") == 0 )

        build_rtrees_classifier( data_filename, filename_to_save, filename_to_load ) :

        build_boost_classifier( data_filename, filename_to_save, filename_to_load ) :

        build_mlp_classifier( data_filename, filename_to_save, filename_to_load ) :
    printf("This is a letter recognition sample.\n"
        "The usage: letter_recog [-data <path to letter-recognition.data>] \\\n"
        "  [-save <output XML file for the classifier>] \\\n"
        "  [-load <XML file with the pre-trained classifier>] \\\n"
        "  [-boost|-mlp] # to use boost/mlp classifier instead of default Random Trees\n" );
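    // Example invocations (file names are illustrative):
    //   ./letter_recog -data ./letter-recognition.data -save rtrees.xml
    //   ./letter_recog -data ./letter-recognition.data -load rtrees.xml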