001//
002// This file is auto-generated. Please don't modify it!
003//
004package org.opencv.ml;
005
006import org.opencv.core.Mat;
007import org.opencv.core.TermCriteria;
008import org.opencv.ml.ANN_MLP;
009import org.opencv.ml.StatModel;
010
// C++: class ANN_MLP
/**
 * Artificial Neural Networks - Multi-Layer Perceptrons.
 *
 * Unlike many other models in ML that are constructed and trained at once, in the MLP model these
 * steps are separated. First, a network with the specified topology is created using the non-default
 * constructor or the method ANN_MLP::create. All the weights are set to zeros. Then, the network is
 * trained using a set of input and output vectors. The training procedure can be repeated more than
 * once, that is, the weights can be adjusted based on the new training data.
 *
 * Additional flags for StatModel::train are available: ANN_MLP::TrainFlags.
 *
 * SEE: REF: ml_intro_ann
 */
public class ANN_MLP extends StatModel {

    // Wraps an existing native cv::ml::ANN_MLP instance; addr is the C++ object pointer.
    protected ANN_MLP(long addr) { super(addr); }

    // internal usage only — converts a raw native pointer into a Java wrapper.
    public static ANN_MLP __fromPtr__(long addr) { return new ANN_MLP(addr); }

    // C++: enum ActivationFunctions (cv.ml.ANN_MLP.ActivationFunctions)
    public static final int
            IDENTITY = 0,
            SIGMOID_SYM = 1,
            GAUSSIAN = 2,
            RELU = 3,
            LEAKYRELU = 4;


    // C++: enum TrainFlags (cv.ml.ANN_MLP.TrainFlags)
    // Bit flags — may be combined with bitwise OR when passed to StatModel::train.
    public static final int
            UPDATE_WEIGHTS = 1,
            NO_INPUT_SCALE = 2,
            NO_OUTPUT_SCALE = 4;


    // C++: enum TrainingMethods (cv.ml.ANN_MLP.TrainingMethods)
    public static final int
            BACKPROP = 0,
            RPROP = 1,
            ANNEAL = 2;


    //
    // C++:  void cv::ml::ANN_MLP::setTrainMethod(int method, double param1 = 0, double param2 = 0)
    //

    /**
     * Sets training method and common parameters.
     *     @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
     *     @param param1 passed to setRpropDW0 for ANN_MLP::RPROP and to setBackpropWeightScale for ANN_MLP::BACKPROP and to initialT for ANN_MLP::ANNEAL.
     *     @param param2 passed to setRpropDWMin for ANN_MLP::RPROP and to setBackpropMomentumScale for ANN_MLP::BACKPROP and to finalT for ANN_MLP::ANNEAL.
     */
    public void setTrainMethod(int method, double param1, double param2) {
        setTrainMethod_0(nativeObj, method, param1, param2);
    }

    /**
     * Sets training method and common parameters.
     * {@code param2} defaults to 0 on the native side.
     *     @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
     *     @param param1 passed to setRpropDW0 for ANN_MLP::RPROP and to setBackpropWeightScale for ANN_MLP::BACKPROP and to initialT for ANN_MLP::ANNEAL.
     */
    public void setTrainMethod(int method, double param1) {
        setTrainMethod_1(nativeObj, method, param1);
    }

    /**
     * Sets training method and common parameters.
     * {@code param1} and {@code param2} default to 0 on the native side.
     *     @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
     */
    public void setTrainMethod(int method) {
        setTrainMethod_2(nativeObj, method);
    }


    //
    // C++:  int cv::ml::ANN_MLP::getTrainMethod()
    //

    /**
     * Returns current training method
     * @return one of ANN_MLP::TrainingMethods (BACKPROP, RPROP or ANNEAL)
     */
    public int getTrainMethod() {
        return getTrainMethod_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setActivationFunction(int type, double param1 = 0, double param2 = 0)
    //

    /**
     * Initialize the activation function for each neuron.
     *     Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
     *     @param type The type of activation function. See ANN_MLP::ActivationFunctions.
     *     @param param1 The first parameter of the activation function, \(\alpha\). Default value is 0.
     *     @param param2 The second parameter of the activation function, \(\beta\). Default value is 0.
     */
    public void setActivationFunction(int type, double param1, double param2) {
        setActivationFunction_0(nativeObj, type, param1, param2);
    }

    /**
     * Initialize the activation function for each neuron.
     *     Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
     *     {@code param2} defaults to 0 on the native side.
     *     @param type The type of activation function. See ANN_MLP::ActivationFunctions.
     *     @param param1 The first parameter of the activation function, \(\alpha\). Default value is 0.
     */
    public void setActivationFunction(int type, double param1) {
        setActivationFunction_1(nativeObj, type, param1);
    }

    /**
     * Initialize the activation function for each neuron.
     *     Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
     *     {@code param1} and {@code param2} default to 0 on the native side.
     *     @param type The type of activation function. See ANN_MLP::ActivationFunctions.
     */
    public void setActivationFunction(int type) {
        setActivationFunction_2(nativeObj, type);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setLayerSizes(Mat _layer_sizes)
    //

    /**
     * Integer vector specifying the number of neurons in each layer including the input and output layers.
     *     The very first element specifies the number of elements in the input layer.
     *     The last element - number of elements in the output layer. Default value is empty Mat.
     * SEE: getLayerSizes
     * @param _layer_sizes integer Mat of per-layer neuron counts (input layer first, output layer last)
     */
    public void setLayerSizes(Mat _layer_sizes) {
        setLayerSizes_0(nativeObj, _layer_sizes.nativeObj);
    }


    //
    // C++:  Mat cv::ml::ANN_MLP::getLayerSizes()
    //

    /**
     * Integer vector specifying the number of neurons in each layer including the input and output layers.
     *     The very first element specifies the number of elements in the input layer.
     *     The last element - number of elements in the output layer.
     * SEE: setLayerSizes
     * @return integer Mat of per-layer neuron counts, as set via setLayerSizes
     */
    public Mat getLayerSizes() {
        return new Mat(getLayerSizes_0(nativeObj));
    }


    //
    // C++:  TermCriteria cv::ml::ANN_MLP::getTermCriteria()
    //

    /**
     * Returns the termination criteria of the training algorithm.
     * SEE: setTermCriteria
     * @return the current TermCriteria (type, maxCount, epsilon)
     */
    public TermCriteria getTermCriteria() {
        return new TermCriteria(getTermCriteria_0(nativeObj));
    }


    //
    // C++:  void cv::ml::ANN_MLP::setTermCriteria(TermCriteria val)
    //

    /**
     * Sets the termination criteria of the training algorithm.
     * SEE: getTermCriteria
     * @param val the new TermCriteria; its type, maxCount and epsilon fields are passed to the native object
     */
    public void setTermCriteria(TermCriteria val) {
        setTermCriteria_0(nativeObj, val.type, val.maxCount, val.epsilon);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getBackpropWeightScale()
    //

    /**
     * SEE: setBackpropWeightScale
     * @return the current BackpropWeightScale value
     */
    public double getBackpropWeightScale() {
        return getBackpropWeightScale_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setBackpropWeightScale(double val)
    //

    /**
     * Sets the value returned by getBackpropWeightScale (BACKPROP-specific parameter).
     * SEE: getBackpropWeightScale
     * @param val new parameter value
     */
    public void setBackpropWeightScale(double val) {
        setBackpropWeightScale_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getBackpropMomentumScale()
    //

    /**
     * SEE: setBackpropMomentumScale
     * @return the current BackpropMomentumScale value
     */
    public double getBackpropMomentumScale() {
        return getBackpropMomentumScale_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setBackpropMomentumScale(double val)
    //

    /**
     * Sets the value returned by getBackpropMomentumScale (BACKPROP-specific parameter).
     * SEE: getBackpropMomentumScale
     * @param val new parameter value
     */
    public void setBackpropMomentumScale(double val) {
        setBackpropMomentumScale_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getRpropDW0()
    //

    /**
     * SEE: setRpropDW0
     * @return the current RpropDW0 value
     */
    public double getRpropDW0() {
        return getRpropDW0_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setRpropDW0(double val)
    //

    /**
     * Sets the value returned by getRpropDW0 (RPROP-specific parameter).
     * SEE: getRpropDW0
     * @param val new parameter value
     */
    public void setRpropDW0(double val) {
        setRpropDW0_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getRpropDWPlus()
    //

    /**
     * SEE: setRpropDWPlus
     * @return the current RpropDWPlus value
     */
    public double getRpropDWPlus() {
        return getRpropDWPlus_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setRpropDWPlus(double val)
    //

    /**
     * Sets the value returned by getRpropDWPlus (RPROP-specific parameter).
     * SEE: getRpropDWPlus
     * @param val new parameter value
     */
    public void setRpropDWPlus(double val) {
        setRpropDWPlus_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getRpropDWMinus()
    //

    /**
     * SEE: setRpropDWMinus
     * @return the current RpropDWMinus value
     */
    public double getRpropDWMinus() {
        return getRpropDWMinus_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setRpropDWMinus(double val)
    //

    /**
     * Sets the value returned by getRpropDWMinus (RPROP-specific parameter).
     * SEE: getRpropDWMinus
     * @param val new parameter value
     */
    public void setRpropDWMinus(double val) {
        setRpropDWMinus_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getRpropDWMin()
    //

    /**
     * SEE: setRpropDWMin
     * @return the current RpropDWMin value
     */
    public double getRpropDWMin() {
        return getRpropDWMin_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setRpropDWMin(double val)
    //

    /**
     * Sets the value returned by getRpropDWMin (RPROP-specific parameter).
     * SEE: getRpropDWMin
     * @param val new parameter value
     */
    public void setRpropDWMin(double val) {
        setRpropDWMin_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getRpropDWMax()
    //

    /**
     * SEE: setRpropDWMax
     * @return the current RpropDWMax value
     */
    public double getRpropDWMax() {
        return getRpropDWMax_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setRpropDWMax(double val)
    //

    /**
     * Sets the value returned by getRpropDWMax (RPROP-specific parameter).
     * SEE: getRpropDWMax
     * @param val new parameter value
     */
    public void setRpropDWMax(double val) {
        setRpropDWMax_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getAnnealInitialT()
    //

    /**
     * SEE: setAnnealInitialT
     * @return the current AnnealInitialT value
     */
    public double getAnnealInitialT() {
        return getAnnealInitialT_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setAnnealInitialT(double val)
    //

    /**
     * Sets the value returned by getAnnealInitialT (ANNEAL-specific parameter).
     * SEE: getAnnealInitialT
     * @param val new parameter value
     */
    public void setAnnealInitialT(double val) {
        setAnnealInitialT_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getAnnealFinalT()
    //

    /**
     * SEE: setAnnealFinalT
     * @return the current AnnealFinalT value
     */
    public double getAnnealFinalT() {
        return getAnnealFinalT_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setAnnealFinalT(double val)
    //

    /**
     * Sets the value returned by getAnnealFinalT (ANNEAL-specific parameter).
     * SEE: getAnnealFinalT
     * @param val new parameter value
     */
    public void setAnnealFinalT(double val) {
        setAnnealFinalT_0(nativeObj, val);
    }


    //
    // C++:  double cv::ml::ANN_MLP::getAnnealCoolingRatio()
    //

    /**
     * SEE: setAnnealCoolingRatio
     * @return the current AnnealCoolingRatio value
     */
    public double getAnnealCoolingRatio() {
        return getAnnealCoolingRatio_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setAnnealCoolingRatio(double val)
    //

    /**
     * Sets the value returned by getAnnealCoolingRatio (ANNEAL-specific parameter).
     * SEE: getAnnealCoolingRatio
     * @param val new parameter value
     */
    public void setAnnealCoolingRatio(double val) {
        setAnnealCoolingRatio_0(nativeObj, val);
    }


    //
    // C++:  int cv::ml::ANN_MLP::getAnnealItePerStep()
    //

    /**
     * SEE: setAnnealItePerStep
     * @return the current AnnealItePerStep value
     */
    public int getAnnealItePerStep() {
        return getAnnealItePerStep_0(nativeObj);
    }


    //
    // C++:  void cv::ml::ANN_MLP::setAnnealItePerStep(int val)
    //

    /**
     * Sets the value returned by getAnnealItePerStep (ANNEAL-specific parameter).
     * SEE: getAnnealItePerStep
     * @param val new parameter value
     */
    public void setAnnealItePerStep(int val) {
        setAnnealItePerStep_0(nativeObj, val);
    }


    //
    // C++:  Mat cv::ml::ANN_MLP::getWeights(int layerIdx)
    //

    /**
     * Returns the weights for the given layer (delegates to native cv::ml::ANN_MLP::getWeights).
     * @param layerIdx index of the layer whose weights are requested
     * @return a Mat wrapping the native weight matrix
     */
    public Mat getWeights(int layerIdx) {
        return new Mat(getWeights_0(nativeObj, layerIdx));
    }


    //
    // C++: static Ptr_ANN_MLP cv::ml::ANN_MLP::create()
    //

    /**
     * Creates empty model
     *
     *     Use StatModel::train to train the model, {@code Algorithm::load<ANN_MLP>(filename)} to load the pre-trained model.
     *     Note that the train method has optional flags: ANN_MLP::TrainFlags.
     * @return a new, untrained ANN_MLP instance
     */
    public static ANN_MLP create() {
        return ANN_MLP.__fromPtr__(create_0());
    }


    //
    // C++: static Ptr_ANN_MLP cv::ml::ANN_MLP::load(String filepath)
    //

    /**
     * Loads and creates a serialized ANN from a file
     *
     * Use ANN::save to serialize and store an ANN to disk.
     * Load the ANN from this file again, by calling this function with the path to the file.
     *
     * @param filepath path to serialized ANN
     * @return the deserialized ANN_MLP instance
     */
    public static ANN_MLP load(String filepath) {
        return ANN_MLP.__fromPtr__(load_0(filepath));
    }


    // Releases the underlying native object when this wrapper is garbage-collected.
    @Override
    protected void finalize() throws Throwable {
        delete(nativeObj);
    }



    // JNI stubs below: names encode the C++ signature and MUST NOT change —
    // the native library binds to them by name.

    // C++:  void cv::ml::ANN_MLP::setTrainMethod(int method, double param1 = 0, double param2 = 0)
    private static native void setTrainMethod_0(long nativeObj, int method, double param1, double param2);
    private static native void setTrainMethod_1(long nativeObj, int method, double param1);
    private static native void setTrainMethod_2(long nativeObj, int method);

    // C++:  int cv::ml::ANN_MLP::getTrainMethod()
    private static native int getTrainMethod_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setActivationFunction(int type, double param1 = 0, double param2 = 0)
    private static native void setActivationFunction_0(long nativeObj, int type, double param1, double param2);
    private static native void setActivationFunction_1(long nativeObj, int type, double param1);
    private static native void setActivationFunction_2(long nativeObj, int type);

    // C++:  void cv::ml::ANN_MLP::setLayerSizes(Mat _layer_sizes)
    private static native void setLayerSizes_0(long nativeObj, long _layer_sizes_nativeObj);

    // C++:  Mat cv::ml::ANN_MLP::getLayerSizes()
    private static native long getLayerSizes_0(long nativeObj);

    // C++:  TermCriteria cv::ml::ANN_MLP::getTermCriteria()
    private static native double[] getTermCriteria_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setTermCriteria(TermCriteria val)
    private static native void setTermCriteria_0(long nativeObj, int val_type, int val_maxCount, double val_epsilon);

    // C++:  double cv::ml::ANN_MLP::getBackpropWeightScale()
    private static native double getBackpropWeightScale_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setBackpropWeightScale(double val)
    private static native void setBackpropWeightScale_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getBackpropMomentumScale()
    private static native double getBackpropMomentumScale_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setBackpropMomentumScale(double val)
    private static native void setBackpropMomentumScale_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getRpropDW0()
    private static native double getRpropDW0_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setRpropDW0(double val)
    private static native void setRpropDW0_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getRpropDWPlus()
    private static native double getRpropDWPlus_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setRpropDWPlus(double val)
    private static native void setRpropDWPlus_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getRpropDWMinus()
    private static native double getRpropDWMinus_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setRpropDWMinus(double val)
    private static native void setRpropDWMinus_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getRpropDWMin()
    private static native double getRpropDWMin_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setRpropDWMin(double val)
    private static native void setRpropDWMin_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getRpropDWMax()
    private static native double getRpropDWMax_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setRpropDWMax(double val)
    private static native void setRpropDWMax_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getAnnealInitialT()
    private static native double getAnnealInitialT_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setAnnealInitialT(double val)
    private static native void setAnnealInitialT_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getAnnealFinalT()
    private static native double getAnnealFinalT_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setAnnealFinalT(double val)
    private static native void setAnnealFinalT_0(long nativeObj, double val);

    // C++:  double cv::ml::ANN_MLP::getAnnealCoolingRatio()
    private static native double getAnnealCoolingRatio_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setAnnealCoolingRatio(double val)
    private static native void setAnnealCoolingRatio_0(long nativeObj, double val);

    // C++:  int cv::ml::ANN_MLP::getAnnealItePerStep()
    private static native int getAnnealItePerStep_0(long nativeObj);

    // C++:  void cv::ml::ANN_MLP::setAnnealItePerStep(int val)
    private static native void setAnnealItePerStep_0(long nativeObj, int val);

    // C++:  Mat cv::ml::ANN_MLP::getWeights(int layerIdx)
    private static native long getWeights_0(long nativeObj, int layerIdx);

    // C++: static Ptr_ANN_MLP cv::ml::ANN_MLP::create()
    private static native long create_0();

    // C++: static Ptr_ANN_MLP cv::ml::ANN_MLP::load(String filepath)
    private static native long load_0(String filepath);

    // native support for java finalize()
    private static native void delete(long nativeObj);

}