// arm64-apple-ios.swiftinterface

// swift-interface-format-version: 1.0
// swift-compiler-version: Apple Swift version 5.4 (swiftlang-1205.0.26.9 clang-1205.0.19.55)
// swift-module-flags: -target arm64-apple-ios9.0 -enable-objc-interop -enable-library-evolution -swift-version 5 -enforce-exclusivity=checked -O -module-name opencv2
import Foundation
import Swift
@_exported import opencv2
extension Calib3d {
  @nonobjc public class func solveP3P(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat, reprojectionError: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat, tvec: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod, rvec: opencv2.Mat) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool, flags: opencv2.SolvePnPMethod) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], useExtrinsicGuess: Swift.Bool) -> Swift.Int32
}
extension Calib3d {
  @nonobjc public class func solvePnPGeneric(objectPoints: opencv2.Mat, imagePoints: opencv2.Mat, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Int32
}
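// Usage sketch (illustrative, not part of the generated interface): solvePnPGeneric
// returns the number of pose candidates found and fills the parallel rvecs/tvecs
// arrays with one rotation/translation pair each. `objectPoints`, `imagePoints`,
// `cameraMatrix`, and `distCoeffs` are assumed to be Mats prepared by the caller.
//
//   var rvecs = [Mat]()
//   var tvecs = [Mat]()
//   let solutions = Calib3d.solvePnPGeneric(objectPoints: objectPoints, imagePoints: imagePoints,
//                                           cameraMatrix: cameraMatrix, distCoeffs: distCoeffs,
//                                           rvecs: &rvecs, tvecs: &tvecs)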
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCamera(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
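// Usage sketch (illustrative): the shortest calibrateCamera overload writes the
// intrinsics into cameraMatrix/distCoeffs in place and returns the RMS reprojection
// error. `objPts`/`imgPts` are assumed to hold one Mat of corresponding 3D/2D points
// per calibration view.
//
//   let cameraMatrix = Mat()
//   let distCoeffs = Mat()
//   var rvecs = [Mat]()
//   var tvecs = [Mat]()
//   let rms = Calib3d.calibrateCamera(objectPoints: objPts, imagePoints: imgPts,
//                                     imageSize: Size2i(width: 640, height: 480),
//                                     cameraMatrix: cameraMatrix, distCoeffs: distCoeffs,
//                                     rvecs: &rvecs, tvecs: &tvecs)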
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, stdDeviationsIntrinsics: opencv2.Mat, stdDeviationsExtrinsics: opencv2.Mat, stdDeviationsObjPoints: opencv2.Mat, perViewErrors: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat, flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrateCameraRO(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], imageSize: opencv2.Size2i, iFixedPoint: Swift.Int32, cameraMatrix: opencv2.Mat, distCoeffs: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], newObjPoints: opencv2.Mat) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func decomposeHomographyMat(H: opencv2.Mat, K: opencv2.Mat, rotations: inout [opencv2.Mat], translations: inout [opencv2.Mat], normals: inout [opencv2.Mat]) -> Swift.Int32
}
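// Usage sketch (illustrative): decomposeHomographyMat returns the number of feasible
// decompositions and fills one rotation, translation, and plane normal per candidate.
// `H` (homography) and `K` (camera intrinsics) are assumed to be 3x3 Mats.
//
//   var rotations = [Mat](), translations = [Mat](), normals = [Mat]()
//   let n = Calib3d.decomposeHomographyMat(H: H, K: K, rotations: &rotations,
//                                          translations: &translations, normals: &normals)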
extension Calib3d {
  @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32, criteria: opencv2.TermCriteria) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Double
}
extension Calib3d {
  @nonobjc public class func calibrate(objectPoints: [opencv2.Mat], imagePoints: [opencv2.Mat], image_size: opencv2.Size2i, K: opencv2.Mat, D: opencv2.Mat, rvecs: inout [opencv2.Mat], tvecs: inout [opencv2.Mat]) -> Swift.Double
}
extension ByteVector {
  public convenience init(_ array: [Swift.Int8])
  public convenience init(_ array: [Swift.UInt8])
  public subscript(index: Swift.Int) -> Swift.Int8 {
    get
  }
  public var array: [Swift.Int8] {
    get
  }
  public var unsignedArray: [Swift.UInt8] {
    get
  }
}
extension ByteVector : Swift.Sequence {
  public typealias Iterator = opencv2.ByteVectorIterator
  public func makeIterator() -> opencv2.ByteVectorIterator
  public typealias Element = opencv2.ByteVectorIterator.Element
}
public struct ByteVectorIterator : Swift.IteratorProtocol {
  public typealias Element = Swift.Int8
  public mutating func next() -> Swift.Int8?
}
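// Usage sketch (illustrative): the Sequence conformance lets a ByteVector drive a
// for-in loop directly, while `array`/`unsignedArray` bridge back to Swift arrays.
//
//   let bytes = ByteVector([1, 2, 3] as [Int8])
//   for b in bytes { print(b) }              // iterates via ByteVectorIterator
//   let unsigned: [UInt8] = bytes.unsignedArray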
extension Core {
  @nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double], mask: opencv2.Mat)
}
extension Core {
  @nonobjc public class func meanStdDev(src: opencv2.Mat, mean: inout [Swift.Double], stddev: inout [Swift.Double])
}
extension Core {
  @nonobjc public class func split(m: opencv2.Mat, mv: inout [opencv2.Mat])
}
extension Core {
  @nonobjc public class func mixChannels(src: [opencv2.Mat], dst: [opencv2.Mat], fromTo: [Swift.Int32])
}
extension Core {
  @nonobjc public class func transposeND(src: opencv2.Mat, order: [Swift.Int32], dst: opencv2.Mat)
}
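// Usage sketch (illustrative): these Core wrappers follow OpenCV's output-parameter
// style, filling inout Swift arrays. `src` is assumed to be a loaded Mat.
//
//   var mean = [Double](), stddev = [Double]()
//   Core.meanStdDev(src: src, mean: &mean, stddev: &stddev)
//   var channels = [Mat]()
//   Core.split(m: src, mv: &channels)        // one single-channel Mat per channel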
extension CvType {
  public static var CV_8U: Swift.Int32
  public static var CV_8S: Swift.Int32
  public static var CV_16U: Swift.Int32
  public static var CV_16S: Swift.Int32
  public static var CV_32S: Swift.Int32
  public static var CV_32F: Swift.Int32
  public static var CV_64F: Swift.Int32
  public static var CV_16F: Swift.Int32
  public static var CV_8UC1: Swift.Int32
  public static var CV_8UC2: Swift.Int32
  public static var CV_8UC3: Swift.Int32
  public static var CV_8UC4: Swift.Int32
  public static var CV_8SC1: Swift.Int32
  public static var CV_8SC2: Swift.Int32
  public static var CV_8SC3: Swift.Int32
  public static var CV_8SC4: Swift.Int32
  public static var CV_16UC1: Swift.Int32
  public static var CV_16UC2: Swift.Int32
  public static var CV_16UC3: Swift.Int32
  public static var CV_16UC4: Swift.Int32
  public static var CV_16SC1: Swift.Int32
  public static var CV_16SC2: Swift.Int32
  public static var CV_16SC3: Swift.Int32
  public static var CV_16SC4: Swift.Int32
  public static var CV_32SC1: Swift.Int32
  public static var CV_32SC2: Swift.Int32
  public static var CV_32SC3: Swift.Int32
  public static var CV_32SC4: Swift.Int32
  public static var CV_32FC1: Swift.Int32
  public static var CV_32FC2: Swift.Int32
  public static var CV_32FC3: Swift.Int32
  public static var CV_32FC4: Swift.Int32
  public static var CV_64FC1: Swift.Int32
  public static var CV_64FC2: Swift.Int32
  public static var CV_64FC3: Swift.Int32
  public static var CV_64FC4: Swift.Int32
  public static var CV_16FC1: Swift.Int32
  public static var CV_16FC2: Swift.Int32
  public static var CV_16FC3: Swift.Int32
  public static var CV_16FC4: Swift.Int32
  public static var CV_CN_MAX: Swift.Int
  public static var CV_CN_SHIFT: Swift.Int
  public static var CV_DEPTH_MAX: Swift.Int
  public static func CV_8UC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_8SC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_16UC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_16SC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_32SC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_32FC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_64FC(_ channels: Swift.Int32) -> Swift.Int32
  public static func CV_16FC(_ channels: Swift.Int32) -> Swift.Int32
}
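// Usage sketch (illustrative): the CV_* constants and the CV_<depth>C(_:) helpers
// produce the integer type codes used when constructing Mats; the helper form picks
// the channel count at runtime.
//
//   let m1 = Mat(rows: 480, cols: 640, type: CvType.CV_8UC3)
//   let m2 = Mat(rows: 480, cols: 640, type: CvType.CV_8UC(3))   // same type code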
extension DoubleVector {
  public convenience init(_ array: [Swift.Double])
  public subscript(index: Swift.Int) -> Swift.Double {
    get
  }
  public var array: [Swift.Double] {
    get
  }
}
extension DoubleVector : Swift.Sequence {
  public typealias Iterator = opencv2.DoubleVectorIterator
  public func makeIterator() -> opencv2.DoubleVectorIterator
  public typealias Element = opencv2.DoubleVectorIterator.Element
}
public struct DoubleVectorIterator : Swift.IteratorProtocol {
  public typealias Element = Swift.Double
  public mutating func next() -> Swift.Double?
}
extension FloatVector {
  public convenience init(_ array: [Swift.Float])
  public subscript(index: Swift.Int) -> Swift.Float {
    get
  }
  public var array: [Swift.Float] {
    get
  }
}
extension FloatVector : Swift.Sequence {
  public typealias Iterator = opencv2.FloatVectorIterator
  public func makeIterator() -> opencv2.FloatVectorIterator
  public typealias Element = opencv2.FloatVectorIterator.Element
}
public struct FloatVectorIterator : Swift.IteratorProtocol {
  public typealias Element = Swift.Float
  public mutating func next() -> Swift.Float?
}
extension IntVector {
  public convenience init(_ array: [Swift.Int32])
  public subscript(index: Swift.Int) -> Swift.Int32 {
    get
  }
  public var array: [Swift.Int32] {
    get
  }
}
extension IntVector : Swift.Sequence {
  public typealias Iterator = opencv2.IntVectorIterator
  public func makeIterator() -> opencv2.IntVectorIterator
  public typealias Element = opencv2.IntVectorIterator.Element
}
public struct IntVectorIterator : Swift.IteratorProtocol {
  public typealias Element = Swift.Int32
  public mutating func next() -> Swift.Int32?
}
public typealias T2<T> = (T, T)
public typealias T3<T> = (T, T, T)
public typealias T4<T> = (T, T, T, T)
extension Mat {
  public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8])
  public convenience init(rows: Swift.Int32, cols: Swift.Int32, type: Swift.Int32, data: [Swift.Int8], step: Swift.Int)
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.Int8]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.UInt8]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.Double]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.Float]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.Int32]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.Int16]) throws -> Swift.Int32
  @discardableResult
  public func get(indices: [Swift.Int32], data: inout [Swift.UInt16]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int8]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt8]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Double]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Float]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int32]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.Int16]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32, data: inout [Swift.UInt16]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Int8]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.UInt8]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Double]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Float]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Int32]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.Int16]) throws -> Swift.Int32
  @discardableResult
  public func put(indices: [Swift.Int32], data: [Swift.UInt16]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt8]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int8], offset: Swift.Int, length: Swift.Int32) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Double]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Float]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int32]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.Int16]) throws -> Swift.Int32
  @discardableResult
  public func put(row: Swift.Int32, col: Swift.Int32, data: [Swift.UInt16]) throws -> Swift.Int32
  @discardableResult
  public func get(row: Swift.Int32, col: Swift.Int32) -> [Swift.Double]
  @discardableResult
  public func get(indices: [Swift.Int32]) -> [Swift.Double]
}
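// Usage sketch (illustrative): put/get copy element data between Swift arrays and the
// Mat's backing store and throw on a type or size mismatch; the Int32 result is the
// number of bytes transferred.
//
//   let m = Mat(rows: 2, cols: 2, type: CvType.CV_32F)
//   try m.put(row: 0, col: 0, data: [1.0, 2.0, 3.0, 4.0] as [Float])
//   var secondRow = [Float](repeating: 0, count: 2)
//   try m.get(row: 1, col: 0, data: &secondRow)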
public protocol Atable {
  static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Self
  static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Self)
  static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self)
  static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self))
  static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self)
  static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self))
  static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Self, Self, Self, Self)
  static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Self, Self, Self, Self))
}
@_hasMissingDesignatedInitializers public class MatAt<N> where N : opencv2.Atable {
  public var v: N {
    get
    set(value)
  }
  public var v2c: (N, N) {
    get
    set(value)
  }
  public var v3c: (N, N, N) {
    get
    set(value)
  }
  public var v4c: (N, N, N, N) {
    get
    set(value)
  }
  @objc deinit
}
extension UInt8 : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt8
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt8)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt8, Swift.UInt8, Swift.UInt8, Swift.UInt8))
}
extension Int8 : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int8
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int8)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int8, Swift.Int8, Swift.Int8, Swift.Int8))
}
extension Double : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Double
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Double)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Double, Swift.Double, Swift.Double, Swift.Double)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Double, Swift.Double, Swift.Double, Swift.Double))
}
extension Float : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Float
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Float)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Float, Swift.Float, Swift.Float, Swift.Float)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Float, Swift.Float, Swift.Float, Swift.Float))
}
extension Int32 : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int32
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int32)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int32, Swift.Int32, Swift.Int32, Swift.Int32))
}
extension UInt16 : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.UInt16
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.UInt16)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.UInt16, Swift.UInt16, Swift.UInt16, Swift.UInt16))
}
extension Int16 : opencv2.Atable {
  public static func getAt(m: opencv2.Mat, indices: [Swift.Int32]) -> Swift.Int16
  public static func putAt(m: opencv2.Mat, indices: [Swift.Int32], v: Swift.Int16)
  public static func getAt2c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16)
  public static func putAt2c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16))
  public static func getAt3c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16)
  public static func putAt3c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16))
  public static func getAt4c(m: opencv2.Mat, indices: [Swift.Int32]) -> (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16)
  public static func putAt4c(m: opencv2.Mat, indices: [Swift.Int32], v: (Swift.Int16, Swift.Int16, Swift.Int16, Swift.Int16))
}
extension Mat {
  public func at<N>(row: Swift.Int32, col: Swift.Int32) -> opencv2.MatAt<N> where N : opencv2.Atable
  public func at<N>(indices: [Swift.Int32]) -> opencv2.MatAt<N> where N : opencv2.Atable
}
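// Usage sketch (illustrative): `at` returns a MatAt accessor whose element type N is
// inferred from context; `v` reads or writes a single channel and `v3c` a 3-channel
// tuple. `rgb` is assumed to be a CV_8UC3 Mat.
//
//   let px: MatAt<UInt8> = rgb.at(row: 10, col: 20)
//   let (r, g, b) = px.v3c
//   px.v3c = (b, g, r)                       // swap channels in place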
extension Mat {
  public static func * (lhs: opencv2.Mat, rhs: opencv2.Mat) -> opencv2.Mat
}
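// Usage sketch (illustrative): the operator performs matrix multiplication, so
// composing two float Mats of compatible shapes reads naturally:
//
//   let transform = rotation * scaling       // both assumed CV_32F Mats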
public typealias Rect = opencv2.Rect2i
public typealias Point = opencv2.Point2i
public typealias Size = opencv2.Size2i
extension Dnn {
  @nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromDarknet(bufferCfg: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8], bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromCaffe(bufferProto: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromTensorflow(bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8], bufferConfig: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNet(framework: Swift.String, bufferModel: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
}
extension Dnn {
  @nonobjc public class func readNetFromONNX(buffer: [Swift.UInt8]) -> opencv2.Net
}
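// Usage sketch (illustrative): the buffer-based readers construct a Net from model
// bytes already in memory, e.g. a bundled ONNX file. `modelData` is assumed to be a
// Foundation Data loaded by the caller.
//
//   let net = Dnn.readNetFromONNX(buffer: [UInt8](modelData))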
extension Dnn {
  @nonobjc public class func imagesFromBlob(blob_: opencv2.Mat, images_: inout [opencv2.Mat])
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.Rect2d], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float, top_k: Swift.Int32)
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], eta: Swift.Float)
}
extension Dnn {
  @nonobjc public class func NMSBoxes(bboxes: [opencv2.RotatedRect], scores: [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
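// Usage sketch (illustrative): NMSBoxes performs non-maximum suppression and writes
// the indices of the boxes to keep. `boxes: [Rect2d]` and `scores: [Float]` are
// assumed to come from a detector's raw output.
//
//   var keep = [Int32]()
//   Dnn.NMSBoxes(bboxes: boxes, scores: scores, score_threshold: 0.5,
//                nms_threshold: 0.4, indices: &keep)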
extension Dnn {
  @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float, method: opencv2.SoftNMSMethod)
}
extension Dnn {
  @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t, sigma: Swift.Float)
}
extension Dnn {
  @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32], top_k: Darwin.size_t)
}
extension Dnn {
  @nonobjc public class func softNMSBoxes(bboxes: [opencv2.Rect2i], scores: [Swift.Float], updated_scores: inout [Swift.Float], score_threshold: Swift.Float, nms_threshold: Swift.Float, indices: inout [Swift.Int32])
}
extension DetectionModel {
  @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float, nmsThreshold: Swift.Float)
}
extension DetectionModel {
  @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i], confThreshold: Swift.Float)
}
extension DetectionModel {
  @nonobjc public func detect(frame: opencv2.Mat, classIds: inout [Swift.Int32], confidences: inout [Swift.Float], boxes: inout [opencv2.Rect2i])
}
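// Usage sketch (illustrative): detect fills three parallel arrays, one entry per
// detection. `model` is assumed to be a DetectionModel built from a detection network.
//
//   var ids = [Int32](), confidences = [Float](), boxes = [Rect2i]()
//   model.detect(frame: frame, classIds: &ids, confidences: &confidences, boxes: &boxes)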
extension Layer {
  @nonobjc public func finalize(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat])
}
extension Layer {
  @available(*, deprecated)
  @nonobjc public func run(inputs: [opencv2.Mat], outputs: inout [opencv2.Mat], internals: inout [opencv2.Mat])
}
extension Model {
  @nonobjc public func predict(frame: opencv2.Mat, outs: inout [opencv2.Mat])
}
extension Net {
  @nonobjc public class func readFromModelOptimizer(bufferModelConfig: [Swift.UInt8], bufferWeights: [Swift.UInt8]) -> opencv2.Net
}
extension Net {
  @nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outputName: Swift.String)
}
extension Net {
  @nonobjc public func forward(outputBlobs: inout [opencv2.Mat])
}
extension Net {
  @nonobjc public func forward(outputBlobs: inout [opencv2.Mat], outBlobNames: [Swift.String])
}
extension Net {
  @nonobjc public func forwardAndRetrieve(outputBlobs: inout [[opencv2.Mat]], outBlobNames: [Swift.String])
}
extension Net {
  @nonobjc public func getInputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
}
extension Net {
  @nonobjc public func getOutputDetails(scales: inout [Swift.Float], zeropoints: inout [Swift.Int32])
}
extension Net {
  @nonobjc public func getLayersShapes(netInputShapes: [opencv2.IntVector], layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
}
extension Net {
  @nonobjc public func getLayersShapes(netInputShape: opencv2.IntVector, layersIds: inout [Swift.Int32], inLayersShapes: inout [[opencv2.IntVector]], outLayersShapes: inout [[opencv2.IntVector]])
}
extension Net {
  @nonobjc public func getLayerTypes(layersTypes: inout [Swift.String])
}
extension Net {
  @nonobjc public func getPerfProfile(timings: inout [Swift.Double]) -> Swift.Int
}
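// Usage sketch (illustrative): forward collects one output blob per requested layer,
// and getPerfProfile returns total inference time in ticks while filling per-layer
// timings. getUnconnectedOutLayersNames() comes from the underlying Objective-C API
// rather than these extensions.
//
//   var outs = [Mat]()
//   net.forward(outputBlobs: &outs, outBlobNames: net.getUnconnectedOutLayersNames())
//   var timings = [Double]()
//   let ticks = net.getPerfProfile(timings: &timings)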
extension TextDetectionModel {
  @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]], confidences: inout [Swift.Float])
}
extension TextDetectionModel {
  @nonobjc public func detect(frame: opencv2.Mat, detections: inout [[opencv2.Point2i]])
}
extension TextDetectionModel {
  @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect], confidences: inout [Swift.Float])
}
extension TextDetectionModel {
  @nonobjc public func detectTextRectangles(frame: opencv2.Mat, detections: inout [opencv2.RotatedRect])
}
extension TextRecognitionModel {
  @nonobjc public func recognize(frame: opencv2.Mat, roiRects: [opencv2.Mat], results: inout [Swift.String])
}
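// Usage sketch (illustrative): recognize appends one decoded string per ROI. The
// `roiRects` Mats are assumed to come from a TextDetectionModel, and `recognizer`
// from a TextRecognitionModel initializer.
//
//   var texts = [String]()
//   recognizer.recognize(frame: frame, roiRects: roiRects, results: &texts)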
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
}
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
}
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8], flags: opencv2.DrawMatchesFlags)
}
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [opencv2.DMatch], outImg: opencv2.Mat, matchesThickness: Swift.Int32, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [Swift.Int8])
}
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]], flags: opencv2.DrawMatchesFlags)
}
extension Features2d {
  @nonobjc public class func drawMatches(img1: opencv2.Mat, keypoints1: [opencv2.KeyPoint], img2: opencv2.Mat, keypoints2: [opencv2.KeyPoint], matches1to2: [[opencv2.DMatch]], outImg: opencv2.Mat, matchColor: opencv2.Scalar, singlePointColor: opencv2.Scalar, matchesMask: [[Swift.Int8]])
}
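// Usage sketch (illustrative): drawMatches renders the two images side by side with
// lines between matched keypoints. Keypoints and matches are assumed to come from a
// Feature2D/DescriptorMatcher pipeline; Scalar(v0:v1:v2:) is assumed to be the Swift
// spelling of the Objective-C Scalar initializer.
//
//   let vis = Mat()
//   Features2d.drawMatches(img1: img1, keypoints1: kp1, img2: img2, keypoints2: kp2,
//                          matches1to2: good, outImg: vis,
//                          matchColor: Scalar(v0: 0, v1: 255, v2: 0),
//                          singlePointColor: Scalar(v0: 255, v1: 0, v2: 0),
//                          matchesMask: [])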
extension AffineFeature {
  @nonobjc public func setViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
}
extension AffineFeature {
  @nonobjc public func getViewParams(tilts: [Swift.Float], rolls: [Swift.Float])
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float, indexChange: [Swift.Int32]) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float, dMin: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32], dMax: Swift.Float) -> opencv2.BRISK
}
extension BRISK {
  @nonobjc public class func create(thresh: Swift.Int32, octaves: Swift.Int32, radiusList: [Swift.Float], numberList: [Swift.Int32]) -> opencv2.BRISK
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat, compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat, compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, mask: opencv2.Mat)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, trainDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch], masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func match(queryDescriptors: opencv2.Mat, matches: inout [opencv2.DMatch])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat], compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32, masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func knnMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], k: Swift.Int32)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat], compactResult: Swift.Bool)
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float, masks: [opencv2.Mat])
}
extension DescriptorMatcher {
  @nonobjc public func radiusMatch(queryDescriptors: opencv2.Mat, matches: inout [[opencv2.DMatch]], maxDistance: Swift.Float)
}
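// Usage sketch (illustrative): knnMatch retrieves the k best matches per query
// descriptor, enabling the usual Lowe ratio test. `desc1`/`desc2` are assumed to be
// descriptor Mats from a Feature2D detector.
//
//   var knn = [[DMatch]]()
//   matcher.knnMatch(queryDescriptors: desc1, trainDescriptors: desc2, matches: &knn, k: 2)
//   let good = knn.filter { $0.count == 2 && $0[0].distance < 0.75 * $0[1].distance }
//                 .map { $0[0] }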
extension Feature2D {
  @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], mask: opencv2.Mat)
}
extension Feature2D {
  @nonobjc public func detect(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint])
}
extension Feature2D {
  @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], masks: [opencv2.Mat])
}
extension Feature2D {
  @nonobjc public func detect(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]])
}
extension Feature2D {
  @nonobjc public func compute(image: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
}
extension Feature2D {
  @nonobjc public func compute(images: [opencv2.Mat], keypoints: inout [[opencv2.KeyPoint]], descriptors: inout [opencv2.Mat])
}
extension Feature2D {
  @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat, useProvidedKeypoints: Swift.Bool)
}
extension Feature2D {
  @nonobjc public func detectAndCompute(image: opencv2.Mat, mask: opencv2.Mat, keypoints: inout [opencv2.KeyPoint], descriptors: opencv2.Mat)
}
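// Usage sketch (illustrative): detectAndCompute finds keypoints and writes their
// descriptors in one pass. `detector` is assumed to be any Feature2D subclass
// instance (e.g. created via a factory method in the Objective-C API).
//
//   var keypoints = [KeyPoint]()
//   let descriptors = Mat()
//   detector.detectAndCompute(image: image, mask: Mat(), keypoints: &keypoints,
//                             descriptors: descriptors)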
extension MSER {
  @nonobjc public func detectRegions(image: opencv2.Mat, msers: inout [[opencv2.Point2i]], bboxes: inout [opencv2.Rect2i])
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], flags: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32, flags: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imreadmulti(filename: Swift.String, mats: inout [opencv2.Mat], start: Swift.Int32, count: Swift.Int32) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imwrite(filename: Swift.String, img: opencv2.Mat, params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imwritemulti(filename: Swift.String, img: [opencv2.Mat], params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8], params: [Swift.Int32]) -> Swift.Bool
}
extension Imgcodecs {
  @nonobjc public class func imencode(ext: Swift.String, img: opencv2.Mat, buf: inout [Swift.UInt8]) -> Swift.Bool
}
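// Usage sketch (illustrative): imencode compresses a Mat into an in-memory buffer,
// which bridges directly to Foundation's Data.
//
//   var buf = [UInt8]()
//   if Imgcodecs.imencode(ext: ".png", img: image, buf: &buf) {
//       let pngData = Data(buf)
//   }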
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, useHarrisDetector: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool, k: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32, useHarrisDetector: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func goodFeaturesToTrack(image: opencv2.Mat, corners: inout [opencv2.Point2i], maxCorners: Swift.Int32, qualityLevel: Swift.Double, minDistance: Swift.Double, mask: opencv2.Mat, blockSize: Swift.Int32, gradientSize: Swift.Int32)
}
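// Usage sketch (illustrative): the five-parameter overload detects up to maxCorners
// strong corners. `gray` is assumed to be a single-channel 8-bit Mat.
//
//   var corners = [Point2i]()
//   Imgproc.goodFeaturesToTrack(image: gray, corners: &corners, maxCorners: 100,
//                               qualityLevel: 0.01, minDistance: 10)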
extension Imgproc {
  @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float], accumulate: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func calcHist(images: [opencv2.Mat], channels: [Swift.Int32], mask: opencv2.Mat, hist: opencv2.Mat, histSize: [Swift.Int32], ranges: [Swift.Float])
}
extension Imgproc {
  @nonobjc public class func calcBackProject(images: [opencv2.Mat], channels: [Swift.Int32], hist: opencv2.Mat, dst: opencv2.Mat, ranges: [Swift.Float], scale: Swift.Double)
}
extension Imgproc {
  @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes, offset: opencv2.Point2i)
}
extension Imgproc {
  @nonobjc public class func findContours(image: opencv2.Mat, contours: inout [[opencv2.Point2i]], hierarchy: opencv2.Mat, mode: opencv2.RetrievalModes, method: opencv2.ContourApproximationModes)
}
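// Usage sketch (illustrative): findContours returns each contour as a [Point2i].
// `binary` is assumed to be a single-channel 8-bit Mat (e.g. from thresholding), and
// the enum case names follow the generated Objective-C bindings.
//
//   var contours = [[Point2i]]()
//   Imgproc.findContours(image: binary, contours: &contours, hierarchy: Mat(),
//                        mode: .RETR_EXTERNAL, method: .CHAIN_APPROX_SIMPLE)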
extension Imgproc {
  @nonobjc public class func approxPolyDP(curve: [opencv2.Point2f], approxCurve: inout [opencv2.Point2f], epsilon: Swift.Double, closed: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32], clockwise: Swift.Bool)
}
extension Imgproc {
  @nonobjc public class func convexHull(points: [opencv2.Point2i], hull: inout [Swift.Int32])
}
extension Imgproc {
  @nonobjc public class func convexityDefects(contour: [opencv2.Point2i], convexhull: [Swift.Int32], convexityDefects: inout [opencv2.Int4])
}
extension Imgproc {
  @nonobjc public class func ellipse2Poly(center: opencv2.Point2i, axes: opencv2.Size2i, angle: Swift.Int32, arcStart: Swift.Int32, arcEnd: Swift.Int32, delta: Swift.Int32, pts: inout [opencv2.Point2i])
}
extension Subdiv2D {
  @nonobjc public func getEdgeList(edgeList: inout [opencv2.Float4])
}
extension Subdiv2D {
  @nonobjc public func getLeadingEdgeList(leadingEdgeList: inout [Swift.Int32])
}
extension Subdiv2D {
  @nonobjc public func getTriangleList(triangleList: inout [opencv2.Float6])
}
extension Subdiv2D {
  @nonobjc public func getVoronoiFacetList(idx: [Swift.Int32], facetList: inout [[opencv2.Point2f]], facetCenters: inout [opencv2.Point2f])
}
extension EM {
  @nonobjc public func getCovs(covs: inout [opencv2.Mat])
}
extension Objdetect {
  @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32, eps: Swift.Double)
}
extension Objdetect {
  @nonobjc public class func groupRectangles(rectList: inout [opencv2.Rect2i], weights: inout [Swift.Int32], groupThreshold: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], scaleFactor: Swift.Double)
}
extension CascadeClassifier {
  @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i])
}
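// Usage sketch (illustrative): the simplest overload fills `objects` with one Rect2i
// per detection at default parameters. `cascade` is assumed to be loaded from a Haar
// cascade XML file, and `gray` to be a grayscale Mat.
//
//   var faces = [Rect2i]()
//   cascade.detectMultiScale(image: gray, objects: &faces)
//   let largest = faces.max { $0.width * $0.height < $1.width * $1.height }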
  770. extension CascadeClassifier {
  771. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
  772. }
  773. extension CascadeClassifier {
  774. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
  775. }
  776. extension CascadeClassifier {
  777. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
  778. }
  779. extension CascadeClassifier {
  780. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
  781. }
  782. extension CascadeClassifier {
  783. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32], scaleFactor: Swift.Double)
  784. }
  785. extension CascadeClassifier {
  786. @nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], numDetections: inout [Swift.Int32])
  787. }
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i, outputRejectLevels: Swift.Bool)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i, maxSize: opencv2.Size2i)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32, minSize: opencv2.Size2i)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32, flags: Swift.Int32)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double, minNeighbors: Swift.Int32)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double], scaleFactor: Swift.Double)
}
extension CascadeClassifier {
@nonobjc public func detectMultiScale(image: opencv2.Mat, objects: inout [opencv2.Rect2i], rejectLevels: inout [Swift.Int32], levelWeights: inout [Swift.Double])
}
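// Usage sketch (comment only): the `rejectLevels`/`levelWeights` family
// exposes, per candidate, the cascade stage reached and the stage score; the
// fullest overload's `outputRejectLevels: true` also reports candidates that
// were rejected. Variables are the same assumptions as above.
//
//     var faces = [Rect2i]()
//     var levels = [Int32]()
//     var weights = [Double]()
//     cascade.detectMultiScale(image: gray, objects: &faces,
//                              rejectLevels: &levels, levelWeights: &weights)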
extension HOGDescriptor {
@nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i, locations: [opencv2.Point2i])
}
extension HOGDescriptor {
@nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float], winStride: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func compute(img: opencv2.Mat, descriptors: inout [Swift.Float])
}
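// Usage sketch (comment only): `compute` fills `descriptors` with the
// concatenated HOG feature vectors for every window position in the image.
// A default-constructed HOGDescriptor (64x128 window) is assumed to exist in
// the bindings, and `gray` is an assumed input frame.
//
//     let hog = HOGDescriptor()
//     var features = [Float]()
//     hog.compute(img: gray, descriptors: &features,
//                 winStride: Size2i(width: 8, height: 8))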
extension HOGDescriptor {
@nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, searchLocations: [opencv2.Point2i])
}
extension HOGDescriptor {
@nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double], hitThreshold: Swift.Double)
}
extension HOGDescriptor {
@nonobjc public func detect(img: opencv2.Mat, foundLocations: inout [opencv2.Point2i], weights: inout [Swift.Double])
}
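// Usage sketch (comment only): `detect` runs the window classifier at a single
// scale, returning the top-left corner of each hit together with its SVM
// weight. `hog` is assumed to have an SVM detector configured beforehand.
//
//     var hits = [Point2i]()
//     var hitWeights = [Double]()
//     hog.detect(img: gray, foundLocations: &hits, weights: &hitWeights)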
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double, useMeanshiftGrouping: Swift.Bool)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double, groupThreshold: Swift.Double)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i, scale: Swift.Double)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i, padding: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double, winStride: opencv2.Size2i)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double], hitThreshold: Swift.Double)
}
extension HOGDescriptor {
@nonobjc public func detectMultiScale(img: opencv2.Mat, foundLocations: inout [opencv2.Rect2i], foundWeights: inout [Swift.Double])
}
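// Usage sketch (comment only): `detectMultiScale` repeats the single-scale
// detector over an image pyramid and groups overlapping hits; `scale` sets the
// pyramid step and `groupThreshold` the grouping strictness. `pedestrianHOG`
// stands for a HOGDescriptor preconfigured with a people detector (assumed).
//
//     var people = [Rect2i]()
//     var scores = [Double]()
//     pedestrianHOG.detectMultiScale(img: gray, foundLocations: &people,
//                                    foundWeights: &scores, hitThreshold: 0)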
extension QRCodeDetector {
@nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String], straight_qrcode: inout [opencv2.Mat]) -> Swift.Bool
}
extension QRCodeDetector {
@nonobjc public func decodeMulti(img: opencv2.Mat, points: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
}
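// Usage sketch (comment only): `decodeMulti` expects `points` to hold the
// corner quads from a prior detection step and returns true when at least one
// code decodes, with one entry in `decoded_info` per detected code. `frame`
// and `corners` are assumptions for illustration.
//
//     let qr = QRCodeDetector()
//     var texts = [String]()
//     let ok = qr.decodeMulti(img: frame, points: corners, decoded_info: &texts)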
extension QRCodeDetector {
@nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat, straight_qrcode: inout [opencv2.Mat]) -> Swift.Bool
}
extension QRCodeDetector {
@nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String], points: opencv2.Mat) -> Swift.Bool
}
extension QRCodeDetector {
@nonobjc public func detectAndDecodeMulti(img: opencv2.Mat, decoded_info: inout [Swift.String]) -> Swift.Bool
}
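// Usage sketch (comment only): `detectAndDecodeMulti` combines detection and
// decoding in one call; codes that were detected but could not be decoded come
// back as empty strings, so filtering them out is a common follow-up.
//
//     var payloads = [String]()
//     if qr.detectAndDecodeMulti(img: frame, decoded_info: &payloads) {
//         let decoded = payloads.filter { !$0.isEmpty }
//     }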
extension QRCodeEncoder {
@nonobjc public func encodeStructuredAppend(encoded_info: Swift.String, qrcodes: inout [opencv2.Mat])
}
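// Usage sketch (comment only): structured-append encoding splits one payload
// across several QR symbols, returned as one Mat per symbol. Creating the
// encoder through a `create()` factory mirrors the C++ API and is an
// assumption here, not something this section declares.
//
//     let encoder = QRCodeEncoder.create()
//     var symbols = [Mat]()
//     encoder.encodeStructuredAppend(encoded_info: "payload split across codes",
//                                    qrcodes: &symbols)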
extension Photo {
@nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoising(src: opencv2.Mat, dst: opencv2.Mat, hVector: [Swift.Float])
}
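// Usage sketch (comment only): these `hVector` overloads take one filter
// strength per channel instead of a single scalar h; larger values remove more
// noise but also more detail. `noisy` is an assumed input Mat.
//
//     let denoised = Mat()
//     Photo.fastNlMeansDenoising(src: noisy, dst: denoised, hVector: [3.0])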
extension Photo {
@nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32, normType: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32, searchWindowSize: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float], templateWindowSize: Swift.Int32)
}
extension Photo {
@nonobjc public class func fastNlMeansDenoisingMulti(srcImgs: [opencv2.Mat], dst: opencv2.Mat, imgToDenoiseIndex: Swift.Int32, temporalWindowSize: Swift.Int32, hVector: [Swift.Float])
}
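// Usage sketch (comment only): the Multi variant denoises one frame of a
// sequence using its temporal neighbours; `temporalWindowSize` must be odd and
// the window must fit inside `srcImgs` around `imgToDenoiseIndex`. `frames` is
// an assumed array of equally sized, same-type Mats.
//
//     let denoised = Mat()
//     Photo.fastNlMeansDenoisingMulti(srcImgs: frames, dst: denoised,
//                                     imgToDenoiseIndex: 2,
//                                     temporalWindowSize: 3, hVector: [4.0])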
extension Video {
@nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32, tryReuseInputImage: Swift.Bool) -> Swift.Int32
}
extension Video {
@nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32, derivBorder: Swift.Int32) -> Swift.Int32
}
extension Video {
@nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool, pyrBorder: Swift.Int32) -> Swift.Int32
}
extension Video {
@nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32, withDerivatives: Swift.Bool) -> Swift.Int32
}
extension Video {
@nonobjc public class func buildOpticalFlowPyramid(img: opencv2.Mat, pyramid: inout [opencv2.Mat], winSize: opencv2.Size2i, maxLevel: Swift.Int32) -> Swift.Int32
}
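// Usage sketch (comment only): `buildOpticalFlowPyramid` precomputes the image
// pyramid consumed by pyramidal Lucas-Kanade tracking and returns the number
// of levels actually built; reusing one pyramid across many tracked points
// avoids rebuilding it per call. `gray` is an assumed CV_8U frame.
//
//     var pyramid = [Mat]()
//     let levels = Video.buildOpticalFlowPyramid(
//         img: gray, pyramid: &pyramid,
//         winSize: Size2i(width: 21, height: 21), maxLevel: 3)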
extension VideoCapture {
@nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32])
}
extension VideoCapture {
@nonobjc public convenience init(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32])
}
extension VideoCapture {
@nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
}
extension VideoCapture {
@nonobjc public func open(index: Swift.Int32, apiPreference: Swift.Int32, params: [Swift.Int32]) -> Swift.Bool
}
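// Usage sketch (comment only): `params` is a flat list of (propertyId, value)
// pairs applied at open time. The raw Int32 constants used below, CAP_ANY = 0
// and CAP_PROP_FRAME_WIDTH = 3, match OpenCV's videoio values but should be
// verified against the Videoio constants rather than hard-coded in real code.
//
//     let capture = VideoCapture(filename: "movie.mp4", apiPreference: 0,
//                                params: [3, 1280])
//     let reopened = capture.open(filename: "movie.mp4", apiPreference: 0,
//                                 params: [])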
extension VideoWriter {
@nonobjc public convenience init(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
}
extension VideoWriter {
@nonobjc public convenience init(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32])
}
extension VideoWriter {
@nonobjc public func open(filename: Swift.String, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
}
extension VideoWriter {
@nonobjc public func open(filename: Swift.String, apiPreference: Swift.Int32, fourcc: Swift.Int32, fps: Swift.Double, frameSize: opencv2.Size2i, params: [Swift.Int32]) -> Swift.Bool
}
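// Usage sketch (comment only): `fourcc` is the codec tag packed into an Int32,
// normally built with VideoWriter's fourcc helper in the wider API (not shown
// in this section), and the `open` overloads report success as Bool when
// reopening an existing writer. The file name, codec value `someFourcc`, and
// frame size below are assumptions for illustration.
//
//     let writer = VideoWriter(filename: "out.mp4", fourcc: someFourcc,
//                              fps: 30.0,
//                              frameSize: Size2i(width: 1280, height: 720),
//                              params: [])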