001//
002// This file is auto-generated. Please don't modify it!
003//
004package org.opencv.ml;
005
006import org.opencv.core.Mat;
007import org.opencv.core.TermCriteria;
008import org.opencv.ml.ParamGrid;
009import org.opencv.ml.SVM;
010import org.opencv.ml.StatModel;
011
012// C++: class SVM
013/**
014 * Support Vector Machines.
015 *
016 * SEE: REF: ml_intro_svm
017 */
public class SVM extends StatModel {

    // Wraps an existing native cv::ml::SVM pointer; instances are obtained via create() or load().
    protected SVM(long addr) { super(addr); }

    // internal usage only
    public static SVM __fromPtr__(long addr) { return new SVM(addr); }

    // C++: enum KernelTypes (cv.ml.SVM.KernelTypes)
    // Kernel type constants, used with setKernel()/getKernelType().
    public static final int
            CUSTOM = -1,
            LINEAR = 0,
            POLY = 1,
            RBF = 2,
            SIGMOID = 3,
            CHI2 = 4,
            INTER = 5;


    // C++: enum ParamTypes (cv.ml.SVM.ParamTypes)
    // Identifiers of tunable SVM parameters, accepted by getDefaultGridPtr().
    public static final int
            C = 0,
            GAMMA = 1,
            P = 2,
            NU = 3,
            COEF = 4,
            DEGREE = 5;


    // C++: enum Types (cv.ml.SVM.Types)
    // SVM formulation types, used with setType()/getType().
    public static final int
            C_SVC = 100,
            NU_SVC = 101,
            ONE_CLASS = 102,
            EPS_SVR = 103,
            NU_SVR = 104;


    //
    // C++:  int cv::ml::SVM::getType()
    //

    /**
     * Returns the SVM formulation type (one of the Types constants).
     * SEE: setType
     * @return automatically generated
     */
    public int getType() {
        return getType_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setType(int val)
    //

    /**
     * Sets the SVM formulation type. SEE: getType
     * @param val automatically generated
     */
    public void setType(int val) {
        setType_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getGamma()
    //

    /**
     * SEE: setGamma
     * @return automatically generated
     */
    public double getGamma() {
        return getGamma_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setGamma(double val)
    //

    /**
     * Sets the gamma kernel parameter. SEE: getGamma
     * @param val automatically generated
     */
    public void setGamma(double val) {
        setGamma_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getCoef0()
    //

    /**
     * SEE: setCoef0
     * @return automatically generated
     */
    public double getCoef0() {
        return getCoef0_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setCoef0(double val)
    //

    /**
     * Sets the coef0 kernel parameter. SEE: getCoef0
     * @param val automatically generated
     */
    public void setCoef0(double val) {
        setCoef0_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getDegree()
    //

    /**
     * SEE: setDegree
     * @return automatically generated
     */
    public double getDegree() {
        return getDegree_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setDegree(double val)
    //

    /**
     * Sets the degree kernel parameter. SEE: getDegree
     * @param val automatically generated
     */
    public void setDegree(double val) {
        setDegree_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getC()
    //

    /**
     * SEE: setC
     * @return automatically generated
     */
    public double getC() {
        return getC_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setC(double val)
    //

    /**
     * Sets the C regularization parameter. SEE: getC
     * @param val automatically generated
     */
    public void setC(double val) {
        setC_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getNu()
    //

    /**
     * SEE: setNu
     * @return automatically generated
     */
    public double getNu() {
        return getNu_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setNu(double val)
    //

    /**
     * Sets the nu parameter. SEE: getNu
     * @param val automatically generated
     */
    public void setNu(double val) {
        setNu_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::SVM::getP()
    //

    /**
     * SEE: setP
     * @return automatically generated
     */
    public double getP() {
        return getP_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setP(double val)
    //

    /**
     * Sets the p parameter. SEE: getP
     * @param val automatically generated
     */
    public void setP(double val) {
        setP_0(nativeObj, val);
    }


    //
    // C++:  Mat cv::ml::SVM::getClassWeights()
    //

    /**
     * SEE: setClassWeights
     * @return automatically generated
     */
    public Mat getClassWeights() {
        return new Mat(getClassWeights_0(nativeObj));
    }


    //
    // C++:  void cv::ml::SVM::setClassWeights(Mat val)
    //

    /**
     * Sets the per-class weights matrix. SEE: getClassWeights
     * @param val automatically generated
     */
    public void setClassWeights(Mat val) {
        setClassWeights_0(nativeObj, val.nativeObj);
    }


    //
    // C++:  TermCriteria cv::ml::SVM::getTermCriteria()
    //

    /**
     * SEE: setTermCriteria
     * @return automatically generated
     */
    public TermCriteria getTermCriteria() {
        // Native side returns the criteria packed as double[]; TermCriteria unpacks it.
        return new TermCriteria(getTermCriteria_0(nativeObj));
    }


    //
    // C++:  void cv::ml::SVM::setTermCriteria(TermCriteria val)
    //

    /**
     * Sets the iterative-training termination criteria. SEE: getTermCriteria
     * @param val automatically generated
     */
    public void setTermCriteria(TermCriteria val) {
        // TermCriteria is passed by its three scalar fields rather than a native handle.
        setTermCriteria_0(nativeObj, val.type, val.maxCount, val.epsilon);
    }


    //
    // C++:  int cv::ml::SVM::getKernelType()
    //

    /**
     * Type of a %SVM kernel.
     * See SVM::KernelTypes. Default value is SVM::RBF.
     * @return automatically generated
     */
    public int getKernelType() {
        return getKernelType_0(nativeObj);
    }


    //
    // C++:  void cv::ml::SVM::setKernel(int kernelType)
    //

    /**
     * Initialize with one of predefined kernels.
     * See SVM::KernelTypes.
     * @param kernelType automatically generated
     */
    public void setKernel(int kernelType) {
        setKernel_0(nativeObj, kernelType);
    }


    //
    // C++:  bool cv::ml::SVM::trainAuto(Mat samples, int layout, Mat responses, int kFold = 10, Ptr_ParamGrid Cgrid = SVM::getDefaultGridPtr(SVM::C), Ptr_ParamGrid gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA), Ptr_ParamGrid pGrid = SVM::getDefaultGridPtr(SVM::P), Ptr_ParamGrid nuGrid = SVM::getDefaultGridPtr(SVM::NU), Ptr_ParamGrid coeffGrid = SVM::getDefaultGridPtr(SVM::COEF), Ptr_ParamGrid degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE), bool balanced = false)
    //

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *     @param pGrid grid for p
     *     @param nuGrid grid for nu
     *     @param coeffGrid grid for coeff
     *     @param degreeGrid grid for degree
     *     @param balanced If true and the problem is 2-class classification then the method creates more
     *         balanced cross-validation subsets that is proportions between classes in subsets are close
     *         to such proportion in the whole train dataset.
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid, ParamGrid degreeGrid, boolean balanced) {
        return trainAuto_0(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr(), degreeGrid.getNativeObjAddr(), balanced);
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *     @param pGrid grid for p
     *     @param nuGrid grid for nu
     *     @param coeffGrid grid for coeff
     *     @param degreeGrid grid for degree
     *
     *     Omitted trailing arguments take their C++ defaults ({@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid, ParamGrid degreeGrid) {
        return trainAuto_1(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr(), degreeGrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *     @param pGrid grid for p
     *     @param nuGrid grid for nu
     *     @param coeffGrid grid for coeff
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid, ParamGrid coeffGrid) {
        return trainAuto_2(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr(), coeffGrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *     @param pGrid grid for p
     *     @param nuGrid grid for nu
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid, ParamGrid nuGrid) {
        return trainAuto_3(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr(), nuGrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *     @param pGrid grid for p
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid, ParamGrid pGrid) {
        return trainAuto_4(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr(), pGrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *     @param gammaGrid grid for gamma
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid, ParamGrid gammaGrid) {
        return trainAuto_5(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr(), gammaGrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *     @param Cgrid grid for C
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold, ParamGrid Cgrid) {
        return trainAuto_6(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold, Cgrid.getNativeObjAddr());
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *     @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
     *         subset is used to test the model, the others form the train set. So, the %SVM algorithm is
     *         executed kFold times.
     *
     *     Omitted trailing arguments take their C++ defaults (grids from SVM::getDefaultGridPtr,
     *     {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses, int kFold) {
        return trainAuto_7(nativeObj, samples.nativeObj, layout, responses.nativeObj, kFold);
    }

    /**
     * Trains an %SVM with optimal parameters
     *
     *     @param samples training samples
     *     @param layout See ml::SampleTypes.
     *     @param responses vector of responses associated with the training samples.
     *
     *     Omitted trailing arguments take their C++ defaults ({@code kFold} = 10, grids from
     *     SVM::getDefaultGridPtr, {@code balanced} = false).
     *
     *     The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
     *     nu, coef0, degree. Parameters are considered optimal when the cross-validation
     *     estimate of the test set error is minimal.
     *
     *     This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
     *     offers rudimentary parameter options.
     *
     *     This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
     *     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     *     the usual %SVM with parameters specified in params is executed.
     * @return automatically generated
     */
    public boolean trainAuto(Mat samples, int layout, Mat responses) {
        return trainAuto_8(nativeObj, samples.nativeObj, layout, responses.nativeObj);
    }


    //
    // C++:  Mat cv::ml::SVM::getSupportVectors()
    //

    /**
     * Retrieves all the support vectors
     *
     *     The method returns all the support vectors as a floating-point matrix, where support vectors are
     *     stored as matrix rows.
     * @return automatically generated
     */
    public Mat getSupportVectors() {
        return new Mat(getSupportVectors_0(nativeObj));
    }


    //
    // C++:  Mat cv::ml::SVM::getUncompressedSupportVectors()
    //

    /**
     * Retrieves all the uncompressed support vectors of a linear %SVM
     *
     *     The method returns all the uncompressed support vectors of a linear %SVM that the compressed
     *     support vector, used for prediction, was derived from. They are returned in a floating-point
     *     matrix, where the support vectors are stored as matrix rows.
     * @return automatically generated
     */
    public Mat getUncompressedSupportVectors() {
        return new Mat(getUncompressedSupportVectors_0(nativeObj));
    }


    //
    // C++:  double cv::ml::SVM::getDecisionFunction(int i, Mat& alpha, Mat& svidx)
    //

    /**
     * Retrieves the decision function
     *
     *     @param i the index of the decision function. If the problem solved is regression, 1-class or
     *         2-class classification, then there will be just one decision function and the index should
     *         always be 0. Otherwise, in the case of N-class classification, there will be \(N(N-1)/2\)
     *         decision functions.
     *     @param alpha the optional output vector for weights, corresponding to different support vectors.
     *         In the case of linear %SVM all the alpha's will be 1's.
     *     @param svidx the optional output vector of indices of support vectors within the matrix of
     *         support vectors (which can be retrieved by SVM::getSupportVectors). In the case of linear
     *         %SVM each decision function consists of a single "compressed" support vector.
     *
     *     The method returns rho parameter of the decision function, a scalar subtracted from the weighted
     *     sum of kernel responses.
     * @return automatically generated
     */
    public double getDecisionFunction(int i, Mat alpha, Mat svidx) {
        // alpha and svidx are C++ output parameters; results are written into the passed Mats.
        return getDecisionFunction_0(nativeObj, i, alpha.nativeObj, svidx.nativeObj);
    }


    //
    // C++: static Ptr_ParamGrid cv::ml::SVM::getDefaultGridPtr(int param_id)
    //

    /**
     * Generates a grid for %SVM parameters.
     *
     *     @param param_id %SVM parameters IDs that must be one of the SVM::ParamTypes. The grid is
     *     generated for the parameter with this ID.
     *
     *     The function generates a grid pointer for the specified parameter of the %SVM algorithm.
     *     The grid may be passed to the function SVM::trainAuto.
     * @return automatically generated
     */
    public static ParamGrid getDefaultGridPtr(int param_id) {
        return ParamGrid.__fromPtr__(getDefaultGridPtr_0(param_id));
    }


    //
    // C++: static Ptr_SVM cv::ml::SVM::create()
    //

    /**
     * Creates empty model.
     *     Use StatModel::train to train the model. Since %SVM has several parameters, you may want to
     * find the best parameters for your problem, it can be done with SVM::trainAuto.
     * @return automatically generated
     */
    public static SVM create() {
        return SVM.__fromPtr__(create_0());
    }


    //
    // C++: static Ptr_SVM cv::ml::SVM::load(String filepath)
    //

    /**
     * Loads and creates a serialized svm from a file
     *
     * Use SVM::save to serialize and store an SVM to disk.
     * Load the SVM from this file again, by calling this function with the path to the file.
     *
     * @param filepath path to serialized svm
     * @return automatically generated
     */
    public static SVM load(String filepath) {
        return SVM.__fromPtr__(load_0(filepath));
    }


    // Releases the underlying native object when this wrapper is garbage-collected.
    @Override
    protected void finalize() throws Throwable {
        delete(nativeObj);
    }



    // C++:  int cv::ml::SVM::getType()
    private static native int getType_0(long nativeObj);

    // C++:  void cv::ml::SVM::setType(int val)
    private static native void setType_0(long nativeObj, int val);

    // C++:  double cv::ml::SVM::getGamma()
    private static native double getGamma_0(long nativeObj);

    // C++:  void cv::ml::SVM::setGamma(double val)
    private static native void setGamma_0(long nativeObj, double val);

    // C++:  double cv::ml::SVM::getCoef0()
    private static native double getCoef0_0(long nativeObj);

    // C++:  void cv::ml::SVM::setCoef0(double val)
    private static native void setCoef0_0(long nativeObj, double val);

    // C++:  double cv::ml::SVM::getDegree()
    private static native double getDegree_0(long nativeObj);

    // C++:  void cv::ml::SVM::setDegree(double val)
    private static native void setDegree_0(long nativeObj, double val);

    // C++:  double cv::ml::SVM::getC()
    private static native double getC_0(long nativeObj);

    // C++:  void cv::ml::SVM::setC(double val)
    private static native void setC_0(long nativeObj, double val);

    // C++:  double cv::ml::SVM::getNu()
    private static native double getNu_0(long nativeObj);

    // C++:  void cv::ml::SVM::setNu(double val)
    private static native void setNu_0(long nativeObj, double val);

    // C++:  double cv::ml::SVM::getP()
    private static native double getP_0(long nativeObj);

    // C++:  void cv::ml::SVM::setP(double val)
    private static native void setP_0(long nativeObj, double val);

    // C++:  Mat cv::ml::SVM::getClassWeights()
    private static native long getClassWeights_0(long nativeObj);

    // C++:  void cv::ml::SVM::setClassWeights(Mat val)
    private static native void setClassWeights_0(long nativeObj, long val_nativeObj);

    // C++:  TermCriteria cv::ml::SVM::getTermCriteria()
    private static native double[] getTermCriteria_0(long nativeObj);

    // C++:  void cv::ml::SVM::setTermCriteria(TermCriteria val)
    private static native void setTermCriteria_0(long nativeObj, int val_type, int val_maxCount, double val_epsilon);

    // C++:  int cv::ml::SVM::getKernelType()
    private static native int getKernelType_0(long nativeObj);

    // C++:  void cv::ml::SVM::setKernel(int kernelType)
    private static native void setKernel_0(long nativeObj, int kernelType);

    // C++:  bool cv::ml::SVM::trainAuto(Mat samples, int layout, Mat responses, int kFold = 10, Ptr_ParamGrid Cgrid = SVM::getDefaultGridPtr(SVM::C), Ptr_ParamGrid gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA), Ptr_ParamGrid pGrid = SVM::getDefaultGridPtr(SVM::P), Ptr_ParamGrid nuGrid = SVM::getDefaultGridPtr(SVM::NU), Ptr_ParamGrid coeffGrid = SVM::getDefaultGridPtr(SVM::COEF), Ptr_ParamGrid degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE), bool balanced = false)
    // trainAuto_1 .. trainAuto_8 are the same native call with successive trailing arguments omitted.
    private static native boolean trainAuto_0(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj, long pGrid_nativeObj, long nuGrid_nativeObj, long coeffGrid_nativeObj, long degreeGrid_nativeObj, boolean balanced);
    private static native boolean trainAuto_1(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj, long pGrid_nativeObj, long nuGrid_nativeObj, long coeffGrid_nativeObj, long degreeGrid_nativeObj);
    private static native boolean trainAuto_2(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj, long pGrid_nativeObj, long nuGrid_nativeObj, long coeffGrid_nativeObj);
    private static native boolean trainAuto_3(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj, long pGrid_nativeObj, long nuGrid_nativeObj);
    private static native boolean trainAuto_4(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj, long pGrid_nativeObj);
    private static native boolean trainAuto_5(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj, long gammaGrid_nativeObj);
    private static native boolean trainAuto_6(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold, long Cgrid_nativeObj);
    private static native boolean trainAuto_7(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj, int kFold);
    private static native boolean trainAuto_8(long nativeObj, long samples_nativeObj, int layout, long responses_nativeObj);

    // C++:  Mat cv::ml::SVM::getSupportVectors()
    private static native long getSupportVectors_0(long nativeObj);

    // C++:  Mat cv::ml::SVM::getUncompressedSupportVectors()
    private static native long getUncompressedSupportVectors_0(long nativeObj);

    // C++:  double cv::ml::SVM::getDecisionFunction(int i, Mat& alpha, Mat& svidx)
    private static native double getDecisionFunction_0(long nativeObj, int i, long alpha_nativeObj, long svidx_nativeObj);

    // C++: static Ptr_ParamGrid cv::ml::SVM::getDefaultGridPtr(int param_id)
    private static native long getDefaultGridPtr_0(int param_id);

    // C++: static Ptr_SVM cv::ml::SVM::create()
    private static native long create_0();

    // C++: static Ptr_SVM cv::ml::SVM::load(String filepath)
    private static native long load_0(String filepath);

    // native support for java finalize()
    private static native void delete(long nativeObj);

}