SVM.h

//
// This file is auto-generated. Please don't modify it!
//
#pragma once

#ifdef __cplusplus
//#import "opencv.hpp"
#import "opencv2/ml.hpp"
#else
#define CV_EXPORTS
#endif

#import <Foundation/Foundation.h>
#import "StatModel.h"

@class Mat;
@class ParamGrid;
@class TermCriteria;

// C++: enum KernelTypes (cv.ml.SVM.KernelTypes)
typedef NS_ENUM(int, KernelTypes) {
    CUSTOM = -1,
    LINEAR = 0,
    POLY = 1,
    RBF = 2,
    SIGMOID = 3,
    CHI2 = 4,
    INTER = 5
};

// C++: enum ParamTypes (cv.ml.SVM.ParamTypes)
typedef NS_ENUM(int, ParamTypes) {
    C = 0,
    GAMMA = 1,
    P = 2,
    NU = 3,
    COEF = 4,
    DEGREE = 5
};

// C++: enum SVMTypes (cv.ml.SVM.Types)
typedef NS_ENUM(int, SVMTypes) {
    C_SVC = 100,
    NU_SVC = 101,
    ONE_CLASS = 102,
    EPS_SVR = 103,
    NU_SVR = 104
};
NS_ASSUME_NONNULL_BEGIN

// C++: class SVM
/**
 * Support Vector Machines.
 *
 * @see REF: ml_intro_svm
 *
 * Member of `Ml`
 */
CV_EXPORTS @interface SVM : StatModel

#ifdef __cplusplus
@property(readonly)cv::Ptr<cv::ml::SVM> nativePtrSVM;
#endif

#ifdef __cplusplus
- (instancetype)initWithNativePtr:(cv::Ptr<cv::ml::SVM>)nativePtr;
+ (instancetype)fromNative:(cv::Ptr<cv::ml::SVM>)nativePtr;
#endif

#pragma mark - Methods
//
// int cv::ml::SVM::getType()
//
/**
 * @see `-setType:`
 */
- (int)getType NS_SWIFT_NAME(getType());

//
// void cv::ml::SVM::setType(int val)
//
/**
 * @see `-getType`
 */
- (void)setType:(int)val NS_SWIFT_NAME(setType(val:));

//
// double cv::ml::SVM::getGamma()
//
/**
 * @see `-setGamma:`
 */
- (double)getGamma NS_SWIFT_NAME(getGamma());

//
// void cv::ml::SVM::setGamma(double val)
//
/**
 * @see `-getGamma`
 */
- (void)setGamma:(double)val NS_SWIFT_NAME(setGamma(val:));

//
// double cv::ml::SVM::getCoef0()
//
/**
 * @see `-setCoef0:`
 */
- (double)getCoef0 NS_SWIFT_NAME(getCoef0());

//
// void cv::ml::SVM::setCoef0(double val)
//
/**
 * @see `-getCoef0`
 */
- (void)setCoef0:(double)val NS_SWIFT_NAME(setCoef0(val:));

//
// double cv::ml::SVM::getDegree()
//
/**
 * @see `-setDegree:`
 */
- (double)getDegree NS_SWIFT_NAME(getDegree());

//
// void cv::ml::SVM::setDegree(double val)
//
/**
 * @see `-getDegree`
 */
- (void)setDegree:(double)val NS_SWIFT_NAME(setDegree(val:));

//
// double cv::ml::SVM::getC()
//
/**
 * @see `-setC:`
 */
- (double)getC NS_SWIFT_NAME(getC());

//
// void cv::ml::SVM::setC(double val)
//
/**
 * @see `-getC`
 */
- (void)setC:(double)val NS_SWIFT_NAME(setC(val:));

//
// double cv::ml::SVM::getNu()
//
/**
 * @see `-setNu:`
 */
- (double)getNu NS_SWIFT_NAME(getNu());

//
// void cv::ml::SVM::setNu(double val)
//
/**
 * @see `-getNu`
 */
- (void)setNu:(double)val NS_SWIFT_NAME(setNu(val:));

//
// double cv::ml::SVM::getP()
//
/**
 * @see `-setP:`
 */
- (double)getP NS_SWIFT_NAME(getP());

//
// void cv::ml::SVM::setP(double val)
//
/**
 * @see `-getP`
 */
- (void)setP:(double)val NS_SWIFT_NAME(setP(val:));

//
// Mat cv::ml::SVM::getClassWeights()
//
/**
 * @see `-setClassWeights:`
 */
- (Mat*)getClassWeights NS_SWIFT_NAME(getClassWeights());

//
// void cv::ml::SVM::setClassWeights(Mat val)
//
/**
 * @see `-getClassWeights`
 */
- (void)setClassWeights:(Mat*)val NS_SWIFT_NAME(setClassWeights(val:));

//
// TermCriteria cv::ml::SVM::getTermCriteria()
//
/**
 * @see `-setTermCriteria:`
 */
- (TermCriteria*)getTermCriteria NS_SWIFT_NAME(getTermCriteria());

//
// void cv::ml::SVM::setTermCriteria(TermCriteria val)
//
/**
 * @see `-getTermCriteria`
 */
- (void)setTermCriteria:(TermCriteria*)val NS_SWIFT_NAME(setTermCriteria(val:));

//
// int cv::ml::SVM::getKernelType()
//
/**
 * Type of a %SVM kernel.
 * See SVM::KernelTypes. Default value is SVM::RBF.
 */
- (int)getKernelType NS_SWIFT_NAME(getKernelType());

//
// void cv::ml::SVM::setKernel(int kernelType)
//
/**
 * Initialize with one of the predefined kernels.
 * See SVM::KernelTypes.
 */
- (void)setKernel:(int)kernelType NS_SWIFT_NAME(setKernel(kernelType:));
//
// bool cv::ml::SVM::trainAuto(Mat samples, int layout, Mat responses, int kFold = 10, Ptr_ParamGrid Cgrid = SVM::getDefaultGridPtr(SVM::C), Ptr_ParamGrid gammaGrid = SVM::getDefaultGridPtr(SVM::GAMMA), Ptr_ParamGrid pGrid = SVM::getDefaultGridPtr(SVM::P), Ptr_ParamGrid nuGrid = SVM::getDefaultGridPtr(SVM::NU), Ptr_ParamGrid coeffGrid = SVM::getDefaultGridPtr(SVM::COEF), Ptr_ParamGrid degreeGrid = SVM::getDefaultGridPtr(SVM::DEGREE), bool balanced = false)
//
/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 * @param pGrid grid for p
 * @param nuGrid grid for nu
 * @param coeffGrid grid for coeff
 * @param degreeGrid grid for degree
 * @param balanced If true and the problem is 2-class classification then the method creates more
 * balanced cross-validation subsets, that is, proportions between classes in subsets are close
 * to such proportion in the whole train dataset.
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid pGrid:(ParamGrid*)pGrid nuGrid:(ParamGrid*)nuGrid coeffGrid:(ParamGrid*)coeffGrid degreeGrid:(ParamGrid*)degreeGrid balanced:(BOOL)balanced NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:pGrid:nuGrid:coeffGrid:degreeGrid:balanced:));
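
//
// A minimal usage sketch of the full trainAuto overload above. It assumes a
// prepared CV_32F Mat `samples` with one training sample per row, a Mat
// `responses` holding the labels, and ROW_SAMPLE taken from the ml
// SampleTypes enum; the grids reuse getDefaultGridPtr declared further below.
//
//     SVM *svm = [SVM create];
//     [svm setType:C_SVC];
//     [svm setKernel:RBF];
//     BOOL ok = [svm trainAuto:samples
//                       layout:ROW_SAMPLE
//                    responses:responses
//                        kFold:10
//                        Cgrid:[SVM getDefaultGridPtr:C]
//                    gammaGrid:[SVM getDefaultGridPtr:GAMMA]
//                        pGrid:[SVM getDefaultGridPtr:P]
//                       nuGrid:[SVM getDefaultGridPtr:NU]
//                    coeffGrid:[SVM getDefaultGridPtr:COEF]
//                   degreeGrid:[SVM getDefaultGridPtr:DEGREE]
//                     balanced:YES];
//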

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 * @param pGrid grid for p
 * @param nuGrid grid for nu
 * @param coeffGrid grid for coeff
 * @param degreeGrid grid for degree
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid pGrid:(ParamGrid*)pGrid nuGrid:(ParamGrid*)nuGrid coeffGrid:(ParamGrid*)coeffGrid degreeGrid:(ParamGrid*)degreeGrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:pGrid:nuGrid:coeffGrid:degreeGrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 * @param pGrid grid for p
 * @param nuGrid grid for nu
 * @param coeffGrid grid for coeff
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid pGrid:(ParamGrid*)pGrid nuGrid:(ParamGrid*)nuGrid coeffGrid:(ParamGrid*)coeffGrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:pGrid:nuGrid:coeffGrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 * @param pGrid grid for p
 * @param nuGrid grid for nu
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid pGrid:(ParamGrid*)pGrid nuGrid:(ParamGrid*)nuGrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:pGrid:nuGrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 * @param pGrid grid for p
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid pGrid:(ParamGrid*)pGrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:pGrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 * @param gammaGrid grid for gamma
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid gammaGrid:(ParamGrid*)gammaGrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:gammaGrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 * @param Cgrid grid for C
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold Cgrid:(ParamGrid*)Cgrid NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:Cgrid:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 * @param kFold Cross-validation parameter. The training set is divided into kFold subsets. One
 * subset is used to test the model, the others form the train set. So, the %SVM algorithm is
 * executed kFold times.
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses kFold:(int)kFold NS_SWIFT_NAME(trainAuto(samples:layout:responses:kFold:));

/**
 * Trains an %SVM with optimal parameters
 *
 * @param samples training samples
 * @param layout See ml::SampleTypes.
 * @param responses vector of responses associated with the training samples.
 *
 * The method trains the %SVM model automatically by choosing the optimal parameters C, gamma, p,
 * nu, coef0, degree. Parameters are considered optimal when the cross-validation
 * estimate of the test set error is minimal.
 *
 * This function only makes use of SVM::getDefaultGrid for parameter optimization and thus only
 * offers rudimentary parameter options.
 *
 * This function works for the classification (SVM::C_SVC or SVM::NU_SVC) as well as for the
 * regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
 * the usual %SVM with parameters specified in params is executed.
 */
- (BOOL)trainAuto:(Mat*)samples layout:(int)layout responses:(Mat*)responses NS_SWIFT_NAME(trainAuto(samples:layout:responses:));

//
// Mat cv::ml::SVM::getSupportVectors()
//
/**
 * Retrieves all the support vectors
 *
 * The method returns all the support vectors as a floating-point matrix, where support vectors are
 * stored as matrix rows.
 */
- (Mat*)getSupportVectors NS_SWIFT_NAME(getSupportVectors());

//
// Mat cv::ml::SVM::getUncompressedSupportVectors()
//
/**
 * Retrieves all the uncompressed support vectors of a linear %SVM
 *
 * The method returns all the uncompressed support vectors of a linear %SVM that the compressed
 * support vector, used for prediction, was derived from. They are returned in a floating-point
 * matrix, where the support vectors are stored as matrix rows.
 */
- (Mat*)getUncompressedSupportVectors NS_SWIFT_NAME(getUncompressedSupportVectors());

//
// double cv::ml::SVM::getDecisionFunction(int i, Mat& alpha, Mat& svidx)
//
/**
 * Retrieves the decision function
 *
 * @param i the index of the decision function. If the problem solved is regression, 1-class or
 * 2-class classification, then there will be just one decision function and the index should
 * always be 0. Otherwise, in the case of N-class classification, there will be `$$N(N-1)/2$$`
 * decision functions.
 * @param alpha the optional output vector for weights, corresponding to different support vectors.
 * In the case of linear %SVM all the alpha's will be 1's.
 * @param svidx the optional output vector of indices of support vectors within the matrix of
 * support vectors (which can be retrieved by SVM::getSupportVectors). In the case of linear
 * %SVM each decision function consists of a single "compressed" support vector.
 *
 * The method returns the rho parameter of the decision function, a scalar subtracted from the
 * weighted sum of kernel responses.
 */
- (double)getDecisionFunction:(int)i alpha:(Mat*)alpha svidx:(Mat*)svidx NS_SWIFT_NAME(getDecisionFunction(i:alpha:svidx:));
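
//
// A short sketch of reading back the decision function of a trained two-class
// SVM instance `svm` (assumed to exist); with a single decision function the
// index is 0.
//
//     Mat *alpha = [Mat new];
//     Mat *svidx = [Mat new];
//     double rho = [svm getDecisionFunction:0 alpha:alpha svidx:svidx];
//     Mat *sv = [svm getSupportVectors];   // one support vector per row
//     // For a linear 2-class SVM there is one "compressed" support vector,
//     // and the decision value for a sample x is sv.row(0) . x - rho.
//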

//
// static Ptr_ParamGrid cv::ml::SVM::getDefaultGridPtr(int param_id)
//
/**
 * Generates a grid for %SVM parameters.
 *
 * @param param_id %SVM parameter ID that must be one of the SVM::ParamTypes. The grid is
 * generated for the parameter with this ID.
 *
 * The function generates a grid pointer for the specified parameter of the %SVM algorithm.
 * The grid may be passed to the function SVM::trainAuto.
 */
+ (ParamGrid*)getDefaultGridPtr:(int)param_id NS_SWIFT_NAME(getDefaultGridPtr(param_id:));
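
//
// A sketch of fetching the default search grid for C and passing it to one of
// the trainAuto overloads above; `samples`, `responses` and ROW_SAMPLE are
// assumed to be prepared as in the earlier example.
//
//     ParamGrid *cGrid = [SVM getDefaultGridPtr:C];
//     BOOL ok = [svm trainAuto:samples layout:ROW_SAMPLE responses:responses
//                        kFold:10 Cgrid:cGrid];
//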

//
// static Ptr_SVM cv::ml::SVM::create()
//
/**
 * Creates an empty model.
 * Use StatModel::train to train the model. Since %SVM has several parameters, you may want to
 * find the best parameters for your problem; this can be done with SVM::trainAuto.
 */
+ (SVM*)create NS_SWIFT_NAME(create());
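
//
// A minimal classifier setup sketch. It assumes a prepared CV_32F Mat
// `samples` (one row per sample), an integer Mat `responses`, ROW_SAMPLE from
// the ml SampleTypes enum, and the train:layout:responses: overload inherited
// from StatModel.
//
//     SVM *svm = [SVM create];
//     [svm setType:C_SVC];
//     [svm setKernel:LINEAR];
//     [svm setC:1.0];
//     BOOL trained = [svm train:samples layout:ROW_SAMPLE responses:responses];
//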

//
// static Ptr_SVM cv::ml::SVM::load(String filepath)
//
/**
 * Loads and creates a serialized SVM from a file.
 *
 * Use SVM::save to serialize and store an SVM to disk.
 * Load the SVM from this file again by calling this function with the path to the file.
 *
 * @param filepath path to the serialized SVM
 */
+ (SVM*)load:(NSString*)filepath NS_SWIFT_NAME(load(filepath:));
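
//
// A save/load round-trip sketch, assuming a trained `svm`, the save: method
// inherited from Algorithm, and a writable `path` string.
//
//     [svm save:path];                 // serialize the trained model to disk
//     SVM *restored = [SVM load:path]; // recreate it from the same file
//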

@end

NS_ASSUME_NONNULL_END