ANN_MLP.h

//
// This file is auto-generated. Please don't modify it!
//
#pragma once
#ifdef __cplusplus
//#import "opencv.hpp"
#import "opencv2/ml.hpp"
#else
#define CV_EXPORTS
#endif

#import <Foundation/Foundation.h>
#import "StatModel.h"

@class Mat;
@class TermCriteria;

// C++: enum ActivationFunctions (cv.ml.ANN_MLP.ActivationFunctions)
typedef NS_ENUM(int, ActivationFunctions) {
    IDENTITY = 0,
    SIGMOID_SYM = 1,
    GAUSSIAN = 2,
    RELU = 3,
    LEAKYRELU = 4
};

// C++: enum TrainFlags (cv.ml.ANN_MLP.TrainFlags)
typedef NS_ENUM(int, TrainFlags) {
    UPDATE_WEIGHTS = 1,
    NO_INPUT_SCALE = 2,
    NO_OUTPUT_SCALE = 4
};

// C++: enum TrainingMethods (cv.ml.ANN_MLP.TrainingMethods)
typedef NS_ENUM(int, TrainingMethods) {
    BACKPROP = 0,
    RPROP = 1,
    ANNEAL = 2
};
NS_ASSUME_NONNULL_BEGIN

// C++: class ANN_MLP
/**
 * Artificial Neural Networks - Multi-Layer Perceptrons.
 *
 * Unlike many other models in ML that are constructed and trained at once, in the MLP model these
 * steps are separated. First, a network with the specified topology is created using the non-default
 * constructor or the method ANN_MLP::create. All the weights are set to zero. Then, the network is
 * trained using a set of input and output vectors. The training procedure can be repeated more than
 * once, that is, the weights can be adjusted based on new training data.
 *
 * Additional flags for StatModel::train are available: ANN_MLP::TrainFlags.
 *
 * @see REF: ml_intro_ann
 *
 * Member of `Ml`
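 *
 * A minimal usage sketch (illustrative only: `layerSizes`, `samples` and `responses`
 * are Mats assumed to be prepared elsewhere, and the train call assumes the
 * StatModel overload `-train:layout:responses:` from StatModel.h):
 *
 *     ANN_MLP *mlp = [ANN_MLP create];
 *     [mlp setLayerSizes:layerSizes];                             // e.g. {2, 8, 1}: 2 inputs, 8 hidden, 1 output
 *     [mlp setActivationFunction:SIGMOID_SYM param1:1 param2:1];
 *     [mlp setTrainMethod:BACKPROP param1:0.1 param2:0.1];
 *     [mlp train:samples layout:0 responses:responses];           // 0 = ROW_SAMPLE, one training sample per row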
 */
CV_EXPORTS @interface ANN_MLP : StatModel

#ifdef __cplusplus
@property(readonly)cv::Ptr<cv::ml::ANN_MLP> nativePtrANN_MLP;
#endif

#ifdef __cplusplus
- (instancetype)initWithNativePtr:(cv::Ptr<cv::ml::ANN_MLP>)nativePtr;
+ (instancetype)fromNative:(cv::Ptr<cv::ml::ANN_MLP>)nativePtr;
#endif

#pragma mark - Methods

//
// void cv::ml::ANN_MLP::setTrainMethod(int method, double param1 = 0, double param2 = 0)
//
/**
 * Sets training method and common parameters.
 * @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
 * @param param1 passed to setRpropDW0 for ANN_MLP::RPROP, to setBackpropWeightScale for ANN_MLP::BACKPROP, and to initialT for ANN_MLP::ANNEAL.
 * @param param2 passed to setRpropDWMin for ANN_MLP::RPROP, to setBackpropMomentumScale for ANN_MLP::BACKPROP, and to finalT for ANN_MLP::ANNEAL.
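 *
 * For example (a sketch; `mlp` is an existing ANN_MLP instance and the values are illustrative):
 *
 *     [mlp setTrainMethod:RPROP param1:0.1 param2:FLT_EPSILON];   // param1 -> RpropDW0, param2 -> RpropDWMin
 *     [mlp setTrainMethod:BACKPROP param1:0.001 param2:0.1];      // param1 -> weight scale, param2 -> momentum scale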
 */
- (void)setTrainMethod:(int)method param1:(double)param1 param2:(double)param2 NS_SWIFT_NAME(setTrainMethod(method:param1:param2:));

/**
 * Sets training method and common parameters.
 * @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
 * @param param1 passed to setRpropDW0 for ANN_MLP::RPROP, to setBackpropWeightScale for ANN_MLP::BACKPROP, and to initialT for ANN_MLP::ANNEAL.
 */
- (void)setTrainMethod:(int)method param1:(double)param1 NS_SWIFT_NAME(setTrainMethod(method:param1:));

/**
 * Sets training method and common parameters.
 * @param method Default value is ANN_MLP::RPROP. See ANN_MLP::TrainingMethods.
 */
- (void)setTrainMethod:(int)method NS_SWIFT_NAME(setTrainMethod(method:));

//
// int cv::ml::ANN_MLP::getTrainMethod()
//
/**
 * Returns the current training method.
 */
- (int)getTrainMethod NS_SWIFT_NAME(getTrainMethod());

//
// void cv::ml::ANN_MLP::setActivationFunction(int type, double param1 = 0, double param2 = 0)
//
/**
 * Initializes the activation function for each neuron.
 * Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
 * @param type The type of activation function. See ANN_MLP::ActivationFunctions.
 * @param param1 The first parameter of the activation function, `$$\alpha$$`. Default value is 0.
 * @param param2 The second parameter of the activation function, `$$\beta$$`. Default value is 0.
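 *
 * For example, the standard symmetric sigmoid with `$$\alpha = \beta = 1$$`
 * (a sketch; `mlp` is an existing ANN_MLP instance):
 *
 *     [mlp setActivationFunction:SIGMOID_SYM param1:1 param2:1];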
 */
- (void)setActivationFunction:(int)type param1:(double)param1 param2:(double)param2 NS_SWIFT_NAME(setActivationFunction(type:param1:param2:));

/**
 * Initializes the activation function for each neuron.
 * Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
 * @param type The type of activation function. See ANN_MLP::ActivationFunctions.
 * @param param1 The first parameter of the activation function, `$$\alpha$$`. Default value is 0.
 */
- (void)setActivationFunction:(int)type param1:(double)param1 NS_SWIFT_NAME(setActivationFunction(type:param1:));

/**
 * Initializes the activation function for each neuron.
 * Currently the default and the only fully supported activation function is ANN_MLP::SIGMOID_SYM.
 * @param type The type of activation function. See ANN_MLP::ActivationFunctions.
 */
- (void)setActivationFunction:(int)type NS_SWIFT_NAME(setActivationFunction(type:));

//
// void cv::ml::ANN_MLP::setLayerSizes(Mat _layer_sizes)
//
/**
 * Integer vector specifying the number of neurons in each layer including the input and output layers.
 * The very first element specifies the number of elements in the input layer.
 * The last element specifies the number of elements in the output layer. Default value is an empty Mat.
 * @see `-getLayerSizes`
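 *
 * For example (a sketch; `layerSizes` is assumed to be a single-row integer Mat
 * built elsewhere and holding {2, 8, 1}: a 2-neuron input layer, one hidden layer
 * of 8 neurons, and a 1-neuron output layer):
 *
 *     [mlp setLayerSizes:layerSizes];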
 */
- (void)setLayerSizes:(Mat*)_layer_sizes NS_SWIFT_NAME(setLayerSizes(_layer_sizes:));

//
// Mat cv::ml::ANN_MLP::getLayerSizes()
//
/**
 * Integer vector specifying the number of neurons in each layer including the input and output layers.
 * The very first element specifies the number of elements in the input layer.
 * The last element specifies the number of elements in the output layer.
 * @see `-setLayerSizes:`
 */
- (Mat*)getLayerSizes NS_SWIFT_NAME(getLayerSizes());

//
// TermCriteria cv::ml::ANN_MLP::getTermCriteria()
//
/**
 * @see `-setTermCriteria:`
 */
- (TermCriteria*)getTermCriteria NS_SWIFT_NAME(getTermCriteria());

//
// void cv::ml::ANN_MLP::setTermCriteria(TermCriteria val)
//
/**
 * @see `-getTermCriteria`
 */
- (void)setTermCriteria:(TermCriteria*)val NS_SWIFT_NAME(setTermCriteria(val:));
//
// double cv::ml::ANN_MLP::getBackpropWeightScale()
//
/**
 * @see `-setBackpropWeightScale:`
 */
- (double)getBackpropWeightScale NS_SWIFT_NAME(getBackpropWeightScale());

//
// void cv::ml::ANN_MLP::setBackpropWeightScale(double val)
//
/**
 * @see `-getBackpropWeightScale`
 */
- (void)setBackpropWeightScale:(double)val NS_SWIFT_NAME(setBackpropWeightScale(val:));

//
// double cv::ml::ANN_MLP::getBackpropMomentumScale()
//
/**
 * @see `-setBackpropMomentumScale:`
 */
- (double)getBackpropMomentumScale NS_SWIFT_NAME(getBackpropMomentumScale());

//
// void cv::ml::ANN_MLP::setBackpropMomentumScale(double val)
//
/**
 * @see `-getBackpropMomentumScale`
 */
- (void)setBackpropMomentumScale:(double)val NS_SWIFT_NAME(setBackpropMomentumScale(val:));

//
// double cv::ml::ANN_MLP::getRpropDW0()
//
/**
 * @see `-setRpropDW0:`
 */
- (double)getRpropDW0 NS_SWIFT_NAME(getRpropDW0());

//
// void cv::ml::ANN_MLP::setRpropDW0(double val)
//
/**
 * @see `-getRpropDW0`
 */
- (void)setRpropDW0:(double)val NS_SWIFT_NAME(setRpropDW0(val:));

//
// double cv::ml::ANN_MLP::getRpropDWPlus()
//
/**
 * @see `-setRpropDWPlus:`
 */
- (double)getRpropDWPlus NS_SWIFT_NAME(getRpropDWPlus());

//
// void cv::ml::ANN_MLP::setRpropDWPlus(double val)
//
/**
 * @see `-getRpropDWPlus`
 */
- (void)setRpropDWPlus:(double)val NS_SWIFT_NAME(setRpropDWPlus(val:));

//
// double cv::ml::ANN_MLP::getRpropDWMinus()
//
/**
 * @see `-setRpropDWMinus:`
 */
- (double)getRpropDWMinus NS_SWIFT_NAME(getRpropDWMinus());

//
// void cv::ml::ANN_MLP::setRpropDWMinus(double val)
//
/**
 * @see `-getRpropDWMinus`
 */
- (void)setRpropDWMinus:(double)val NS_SWIFT_NAME(setRpropDWMinus(val:));

//
// double cv::ml::ANN_MLP::getRpropDWMin()
//
/**
 * @see `-setRpropDWMin:`
 */
- (double)getRpropDWMin NS_SWIFT_NAME(getRpropDWMin());

//
// void cv::ml::ANN_MLP::setRpropDWMin(double val)
//
/**
 * @see `-getRpropDWMin`
 */
- (void)setRpropDWMin:(double)val NS_SWIFT_NAME(setRpropDWMin(val:));
//
// double cv::ml::ANN_MLP::getRpropDWMax()
//
/**
 * @see `-setRpropDWMax:`
 */
- (double)getRpropDWMax NS_SWIFT_NAME(getRpropDWMax());

//
// void cv::ml::ANN_MLP::setRpropDWMax(double val)
//
/**
 * @see `-getRpropDWMax`
 */
- (void)setRpropDWMax:(double)val NS_SWIFT_NAME(setRpropDWMax(val:));

//
// double cv::ml::ANN_MLP::getAnnealInitialT()
//
/**
 * @see `-setAnnealInitialT:`
 */
- (double)getAnnealInitialT NS_SWIFT_NAME(getAnnealInitialT());

//
// void cv::ml::ANN_MLP::setAnnealInitialT(double val)
//
/**
 * @see `-getAnnealInitialT`
 */
- (void)setAnnealInitialT:(double)val NS_SWIFT_NAME(setAnnealInitialT(val:));

//
// double cv::ml::ANN_MLP::getAnnealFinalT()
//
/**
 * @see `-setAnnealFinalT:`
 */
- (double)getAnnealFinalT NS_SWIFT_NAME(getAnnealFinalT());

//
// void cv::ml::ANN_MLP::setAnnealFinalT(double val)
//
/**
 * @see `-getAnnealFinalT`
 */
- (void)setAnnealFinalT:(double)val NS_SWIFT_NAME(setAnnealFinalT(val:));

//
// double cv::ml::ANN_MLP::getAnnealCoolingRatio()
//
/**
 * @see `-setAnnealCoolingRatio:`
 */
- (double)getAnnealCoolingRatio NS_SWIFT_NAME(getAnnealCoolingRatio());

//
// void cv::ml::ANN_MLP::setAnnealCoolingRatio(double val)
//
/**
 * @see `-getAnnealCoolingRatio`
 */
- (void)setAnnealCoolingRatio:(double)val NS_SWIFT_NAME(setAnnealCoolingRatio(val:));

//
// int cv::ml::ANN_MLP::getAnnealItePerStep()
//
/**
 * @see `-setAnnealItePerStep:`
 */
- (int)getAnnealItePerStep NS_SWIFT_NAME(getAnnealItePerStep());

//
// void cv::ml::ANN_MLP::setAnnealItePerStep(int val)
//
/**
 * @see `-getAnnealItePerStep`
 */
- (void)setAnnealItePerStep:(int)val NS_SWIFT_NAME(setAnnealItePerStep(val:));
//
// Mat cv::ml::ANN_MLP::getWeights(int layerIdx)
//
- (Mat*)getWeights:(int)layerIdx NS_SWIFT_NAME(getWeights(layerIdx:));

//
// static Ptr_ANN_MLP cv::ml::ANN_MLP::create()
//
/**
 * Creates an empty model.
 *
 * Use StatModel::train to train the model, or Algorithm::load\<ANN_MLP\>(filename) to load a pre-trained model.
 * Note that the train method has optional flags: ANN_MLP::TrainFlags.
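 *
 * A sketch of incremental training with TrainFlags (illustrative; `trainData` is a
 * TrainData instance prepared elsewhere, and the flag-taking train overload is
 * assumed from StatModel.h):
 *
 *     ANN_MLP *mlp = [ANN_MLP create];
 *     // ... set layer sizes, activation function and train method, then train once ...
 *     [mlp train:trainData flags:UPDATE_WEIGHTS];   // later: adjust the existing weights with new data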
 */
+ (ANN_MLP*)create NS_SWIFT_NAME(create());

//
// static Ptr_ANN_MLP cv::ml::ANN_MLP::load(String filepath)
//
/**
 * Loads and creates a serialized ANN_MLP from a file.
 *
 * Use ANN_MLP::save to serialize and store an ANN to disk.
 * Load the ANN from this file again by calling this function with the path to the file.
 *
 * @param filepath path to the serialized ANN
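 *
 * For example (a sketch; `save:` is assumed from the Algorithm base class and the
 * path is illustrative):
 *
 *     [mlp save:@"mlp.yml"];                          // serialize a trained network
 *     ANN_MLP *restored = [ANN_MLP load:@"mlp.yml"];  // recreate it later from the same file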
 */
+ (ANN_MLP*)load:(NSString*)filepath NS_SWIFT_NAME(load(filepath:));

@end

NS_ASSUME_NONNULL_END