removed a couple of unnecessary comments in Logistic Regression training method
author Rahul Kavi <leorahul16@gmail.com>
Mon, 11 Nov 2013 20:29:19 +0000 (15:29 -0500)
committer Maksim Shabunin <maksim.shabunin@itseez.com>
Mon, 18 Aug 2014 15:06:58 +0000 (19:06 +0400)
modules/ml/src/lr.cpp

index 09acf9d..2411ea3 100644
@@ -159,7 +159,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip)
     if(num_classes == 2)
     {
         labels_l.convertTo(labels, CV_32F);
-        // new_theta = compute_batch_gradient(data_t, labels, init_theta);
         //currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH
         if(this->params.train_method == LogisticRegression::BATCH)
             new_theta = compute_batch_gradient(data_t, labels, init_theta);
@@ -177,7 +176,6 @@ bool LogisticRegression::train(cv::InputArray data_ip, cv::InputArray labels_ip)
         {
             new_local_labels = (labels_l == it->second)/255;
             new_local_labels.convertTo(labels, CV_32F);
-            // new_theta = compute_batch_gradient(data_t, labels, init_theta);
             // currently supported training methods LogisticRegression::BATCH and LogisticRegression::MINI_BATCH
             if(this->params.train_method == LogisticRegression::BATCH)
                 new_theta = compute_batch_gradient(data_t, labels, init_theta);
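Both hunks touch the same dispatch inside train(): a gradient routine is chosen according to params.train_method. Below is a minimal standalone sketch of that pattern, not the actual lr.cpp implementation; the enum, the Params struct, the fit_theta wrapper, and the compute_mini_batch_gradient name are assumptions inferred from the diff's comment listing LogisticRegression::BATCH and LogisticRegression::MINI_BATCH as the supported methods.

// Minimal sketch of the train-method dispatch seen in both hunks.
// All names below are hypothetical stand-ins for the real lr.cpp
// internals; only compute_batch_gradient and the BATCH/MINI_BATCH
// constants appear in the diff itself.
#include <opencv2/core.hpp>

namespace sketch {

enum TrainMethod { BATCH, MINI_BATCH };

struct Params
{
    TrainMethod train_method = BATCH;
};

// Placeholder gradient routines; the real ones iterate gradient
// descent over the full data set (batch) or over subsets (mini-batch).
cv::Mat compute_batch_gradient(const cv::Mat& data, const cv::Mat& labels,
                               const cv::Mat& init_theta)
{
    return init_theta.clone(); // stand-in for the optimization loop
}

cv::Mat compute_mini_batch_gradient(const cv::Mat& data, const cv::Mat& labels,
                                    const cv::Mat& init_theta)
{
    return init_theta.clone(); // stand-in for the optimization loop
}

// The pattern the commit cleans up: one if/else selects the gradient
// routine from the configured training method.
cv::Mat fit_theta(const Params& params, const cv::Mat& data_t,
                  const cv::Mat& labels, const cv::Mat& init_theta)
{
    cv::Mat new_theta;
    if (params.train_method == BATCH)
        new_theta = compute_batch_gradient(data_t, labels, init_theta);
    else // MINI_BATCH
        new_theta = compute_mini_batch_gradient(data_t, labels, init_theta);
    return new_theta;
}

} // namespace sketch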