// NOTE(review): this span fuses unified-diff hunks from more than one class —
// a NormalBayesClassifier/KNearest-style Params (defaultK/isclassifier) and an
// EM-style Params (nclusters/covMatType/termCrit). Confirm against the complete
// patch before applying; the `};` pairing below is not coherent as shown.
class CV_EXPORTS_W NormalBayesClassifier : public StatModel
{
public:
// Switch Params from CV_EXPORTS_W_MAP to CV_EXPORTS_W and tag each field
// CV_PROP_RW — presumably so the binding generator exposes the fields as
// read/write properties rather than a plain map; verify against the wrappers.
- class CV_EXPORTS_W_MAP Params
+ class CV_EXPORTS_W Params
{
public:
Params();
public:
Params(int defaultK=10, bool isclassifier=true);
- int defaultK;
- bool isclassifier;
+ CV_PROP_RW int defaultK;
+ CV_PROP_RW bool isclassifier;
};
virtual void setParams(const Params& p) = 0;
virtual Params getParams() const = 0;
// EM-style Params hunk (belongs to a different class in the original file):
explicit Params(int nclusters=DEFAULT_NCLUSTERS, int covMatType=EM::COV_MAT_DIAGONAL,
const TermCriteria& termCrit=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS,
EM::DEFAULT_MAX_ITERS, 1e-6));
- int nclusters;
- int covMatType;
- TermCriteria termCrit;
+ CV_PROP_RW int nclusters;
+ CV_PROP_RW int covMatType;
+ CV_PROP_RW TermCriteria termCrit;
};
virtual void setParams(const Params& p) = 0;
// Resets the model's cached range statistics, RNG, and weights so it can be
// retrained from scratch; `trained` is cleared so predict() callers can tell.
void clear()
{
min_val = max_val = min_val1 = max_val1 = 0.;
// cv::RNG's constructor takes uint64; the explicit (uint64)-1 cast avoids
// relying on an implicit signed->unsigned conversion of the -1 literal
// (and the associated compiler warning).
- rng = RNG(-1);
+ rng = RNG((uint64)-1);
weights.clear();
trained = false;
}
{
int maxIdx[] = {0, 0};
minMaxIdx(outputs, 0, 0, 0, maxIdx);
- return maxIdx[0] + maxIdx[1];
+ return (float)(maxIdx[0] + maxIdx[1]);
}
return 0.f;
train_backprop( inputs, outputs, sw, termcrit ) :
train_rprop( inputs, outputs, sw, termcrit );
- trained = true;
-
- return iter;
+ trained = iter > 0;
+ return trained;
}
int train_backprop( const Mat& inputs, const Mat& outputs, const Mat& _sw, TermCriteria termCrit )
// Recomputes boosting sample weights after tree `treeidx` is added and trims
// low-weight samples; indices of surviving samples are pushed into `sidx`.
// NOTE(review): this span fuses several diff hunks — the body between
// `predictFlags |= COMPRESSED_INPUT;` and the trailing `if` is elided here.
// The `putchar('<')` / `putchar('>'); fflush(stdout);` additions from the
// original hunk are dropped: they are leftover debug instrumentation writing
// to stdout from library code, and the same patch removes an analogous
// `putchar('.')` progress dot elsewhere.
void updateWeightsAndTrim( int treeidx, vector<int>& sidx )
{
int i, n = (int)w->sidx.size();
int nvars = (int)varIdx.size();
double sumw = 0., C = 1.;
// Shrink the scratch buffer: only n doubles are used for `result`, so the
// old n*3 allocation over-reserved; sbuf (nvars floats) follows `result`.
- cv::AutoBuffer<double> buf(n*3 + nvars);
+ cv::AutoBuffer<double> buf(n + nvars);
double* result = buf;
- float* sbuf = (float*)(result + n*3);
+ float* sbuf = (float*)(result + n);
Mat sample(1, nvars, CV_32F, sbuf);
int predictFlags = bparams.boostType == Boost::DISCRETE ? (PREDICT_MAX_VOTE | RAW_OUTPUT) : PREDICT_SUM;
predictFlags |= COMPRESSED_INPUT;
if( w->sample_weights[si] >= threshold )
sidx.push_back(si);
}
}
float predictTrees( const Range& range, const Mat& sample, int flags0 ) const
varType.create(1, nvars, CV_8U);
varType = Scalar::all(VAR_ORDERED);
if( noutputvars == 1 )
- varType.at<uchar>(ninputvars) = responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED;
+ varType.at<uchar>(ninputvars) = (uchar)(responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED);
}
if( noutputvars > 1 )
if( tp == VAR_MISSED )
haveMissed = true;
rowvals.push_back(val);
- rowtypes.push_back(tp);
+ rowtypes.push_back((uchar)tp);
token = strtok(NULL, delimiters);
if (!token)
break;
if( s )
{
j = s[i];
- CV_DbgAssert( 0 <= j && j < nsamples );
+ CV_Assert( 0 <= j && j < nsamples );
}
values[i] = src[j*sstep];
if( values[i] == MISSED_VAL )
if( vptr )
{
j = vptr[i];
- CV_DbgAssert( 0 <= j && j < nvars );
+ CV_Assert( 0 <= j && j < nvars );
}
buf[i] = src[j*vstep];
}
if( _resp.needed() )
resp.copyTo(_resp);
- return err / n * (isclassifier ? 100 : 1);
+ return (float)(err / n * (isclassifier ? 100 : 1));
}
void StatModel::save(const String& filename) const
}
float result = 0.f;
- float inv_scale = 1./k;
+ float inv_scale = 1.f/k;
for( testidx = 0; testidx < testcount; testidx++ )
{
termCrit.type |= TermCriteria::EPS;
termCrit.epsilon = epsilon;
}
- int iters = (double)fn["iterations"];
+ int iters = (int)fn["iterations"];
if( iters > 0 )
{
termCrit.type |= TermCriteria::COUNT;
}
int class_idx;
- int Tn;
+ double Tn;
double value;
int parent;
}
int varIdx;
- int inversed;
+ bool inversed;
float quality;
int next;
float c;
vector<WNode> wnodes;
vector<WSplit> wsplits;
vector<int> wsubsets;
- vector<int> cv_Tn;
+ vector<double> cv_Tn;
vector<double> cv_node_risk;
vector<double> cv_node_error;
vector<int> cv_labels;
{
DTreesImpl::clear();
oobError = 0.;
- rng = RNG(-1);
+ rng = RNG((uint64)-1);
}
const vector<int>& getActiveVars()
for( treeidx = 0; treeidx < ntrees; treeidx++ )
{
- putchar('.'); fflush(stdout);
for( i = 0; i < n; i++ )
oobmask[i] = (uchar)1;
bool balanced )
{
int svmType = params.svmType;
- RNG rng(-1);
+ RNG rng((uint64)-1);
if( svmType == ONE_CLASS )
// current implementation of "auto" svm does not support the 1-class case.
{
split.varIdx = vi;
split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
- split.inversed = 0;
+ split.inversed = false;
split.quality = (float)best_val;
}
return split;
cv::AutoBuffer<double> buf(n + k);
double *v_weights = buf, *c_weights = buf + n;
bool modified = true;
- RNG r(-1);
+ RNG r((uint64)-1);
// assign labels randomly
for( i = 0; i < n; i++ )
{
- int sum = 0;
+ double sum = 0;
const double* v = vectors + i*m;
labels[i] = i < k ? i : r.uniform(0, k);
{
split.varIdx = vi;
split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
- split.inversed = 0;
+ split.inversed = false;
split.quality = (float)best_val;
}
return split;