sast_postprocess.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys

__dir__ = os.path.dirname(__file__)
sys.path.append(__dir__)
sys.path.append(os.path.join(__dir__, '..'))

import numpy as np
from .locality_aware_nms import nms_locality
import paddle
import cv2
import time
class SASTPostProcess(object):
    """
    The post process for SAST.
    """

    def __init__(self,
                 score_thresh=0.5,
                 nms_thresh=0.2,
                 sample_pts_num=2,
                 shrink_ratio_of_width=0.3,
                 expand_scale=1.0,
                 tcl_map_thresh=0.5,
                 **kwargs):
        self.score_thresh = score_thresh
        self.nms_thresh = nms_thresh
        self.sample_pts_num = sample_pts_num
        self.shrink_ratio_of_width = shrink_ratio_of_width
        self.expand_scale = expand_scale
        self.tcl_map_thresh = tcl_map_thresh

        # The C++ la-nms is faster, but it only supports Python 3.5.
        self.is_python35 = False
        if sys.version_info.major == 3 and sys.version_info.minor == 5:
            self.is_python35 = True
    def point_pair2poly(self, point_pair_list):
        """
        Transfer vertical point pairs into a polygon whose points are in clockwise order.
        """
        # construct poly
        point_num = len(point_pair_list) * 2
        point_list = [0] * point_num
        for idx, point_pair in enumerate(point_pair_list):
            point_list[idx] = point_pair[0]
            point_list[point_num - 1 - idx] = point_pair[1]
        return np.array(point_list).reshape(-1, 2)
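
    # For illustration (hypothetical points): point_pair2poly with the pairs
    # [(t0, b0), (t1, b1), (t2, b2)], sampled left-to-right along the center
    # line, returns the clockwise ring [t0, t1, t2, b2, b1, b0], i.e. the top
    # points in order followed by the bottom points reversed.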
    def shrink_quad_along_width(self,
                                quad,
                                begin_width_ratio=0.,
                                end_width_ratio=1.):
        """
        Generate a quad shrunk along its width direction.
        """
        ratio_pair = np.array(
            [[begin_width_ratio], [end_width_ratio]], dtype=np.float32)
        p0_1 = quad[0] + (quad[1] - quad[0]) * ratio_pair
        p3_2 = quad[3] + (quad[2] - quad[3]) * ratio_pair
        return np.array([p0_1[0], p0_1[1], p3_2[1], p3_2[0]])
    def expand_poly_along_width(self, poly, shrink_ratio_of_width=0.3):
        """
        Expand the poly along its width direction.
        """
        point_num = poly.shape[0]
        left_quad = np.array(
            [poly[0], poly[1], poly[-2], poly[-1]], dtype=np.float32)
        left_ratio = -shrink_ratio_of_width * np.linalg.norm(left_quad[0] - left_quad[3]) / \
            (np.linalg.norm(left_quad[0] - left_quad[1]) + 1e-6)
        left_quad_expand = self.shrink_quad_along_width(left_quad, left_ratio,
                                                        1.0)
        right_quad = np.array(
            [
                poly[point_num // 2 - 2], poly[point_num // 2 - 1],
                poly[point_num // 2], poly[point_num // 2 + 1]
            ],
            dtype=np.float32)
        right_ratio = 1.0 + \
            shrink_ratio_of_width * np.linalg.norm(right_quad[0] - right_quad[3]) / \
            (np.linalg.norm(right_quad[0] - right_quad[1]) + 1e-6)
        right_quad_expand = self.shrink_quad_along_width(right_quad, 0.0,
                                                         right_ratio)
        poly[0] = left_quad_expand[0]
        poly[-1] = left_quad_expand[-1]
        poly[point_num // 2 - 1] = right_quad_expand[1]
        poly[point_num // 2] = right_quad_expand[2]
        return poly
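
    # In effect, expand_poly_along_width pushes the left and right end quads of
    # the poly outward along the width direction by roughly
    # shrink_ratio_of_width times the local text height, presumably to undo the
    # border shrink applied when the labels were generated.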
    def restore_quad(self, tcl_map, tcl_map_thresh, tvo_map):
        """Restore quads from the tcl map and the tvo map."""
        xy_text = np.argwhere(tcl_map[:, :, 0] > tcl_map_thresh)
        xy_text = xy_text[:, ::-1]  # (n, 2)

        # Sort the text boxes via the y axis
        xy_text = xy_text[np.argsort(xy_text[:, 1])]

        scores = tcl_map[xy_text[:, 1], xy_text[:, 0], 0]
        scores = scores[:, np.newaxis]

        # Restore
        point_num = int(tvo_map.shape[-1] / 2)
        assert point_num == 4
        tvo_map = tvo_map[xy_text[:, 1], xy_text[:, 0], :]
        xy_text_tile = np.tile(xy_text, (1, point_num))  # (n, point_num * 2)
        quads = xy_text_tile - tvo_map

        return scores, quads, xy_text
    def quad_area(self, quad):
        """
        Compute the (signed) area of a quad.
        """
        edge = [(quad[1][0] - quad[0][0]) * (quad[1][1] + quad[0][1]),
                (quad[2][0] - quad[1][0]) * (quad[2][1] + quad[1][1]),
                (quad[3][0] - quad[2][0]) * (quad[3][1] + quad[2][1]),
                (quad[0][0] - quad[3][0]) * (quad[0][1] + quad[3][1])]
        return np.sum(edge) / 2.
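
    # Note: quad_area uses the trapezoid form of the shoelace formula,
    #     signed_area = 0.5 * sum_i (x_{i+1} - x_i) * (y_{i+1} + y_i),
    # so its sign depends on the vertex order; detect_sast below negates the
    # result before filtering out small quads.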
    def nms(self, dets):
        if self.is_python35:
            from ppocr.utils.utility import check_install
            check_install('lanms', 'lanms-nova')
            import lanms
            dets = lanms.merge_quadrangle_n9(dets, self.nms_thresh)
        else:
            dets = nms_locality(dets, self.nms_thresh)
        return dets
    def cluster_by_quads_tco(self, tcl_map, tcl_map_thresh, quads, tco_map):
        """
        Cluster pixels in tcl_map based on quads.
        """
        instance_count = quads.shape[0] + 1  # contain background
        instance_label_map = np.zeros(tcl_map.shape[:2], dtype=np.int32)
        if instance_count == 1:
            return instance_count, instance_label_map

        # predict text center
        xy_text = np.argwhere(tcl_map[:, :, 0] > tcl_map_thresh)
        n = xy_text.shape[0]
        xy_text = xy_text[:, ::-1]  # (n, 2)
        tco = tco_map[xy_text[:, 1], xy_text[:, 0], :]  # (n, 2)
        pred_tc = xy_text - tco

        # get gt text center
        m = quads.shape[0]
        gt_tc = np.mean(quads, axis=1)  # (m, 2)

        pred_tc_tile = np.tile(pred_tc[:, np.newaxis, :],
                               (1, m, 1))  # (n, m, 2)
        gt_tc_tile = np.tile(gt_tc[np.newaxis, :, :], (n, 1, 1))  # (n, m, 2)
        dist_mat = np.linalg.norm(pred_tc_tile - gt_tc_tile, axis=2)  # (n, m)
        xy_text_assign = np.argmin(dist_mat, axis=1) + 1  # (n,)

        instance_label_map[xy_text[:, 1], xy_text[:, 0]] = xy_text_assign
        return instance_count, instance_label_map
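
    # In other words, each tcl pixel votes for a text center (its position
    # minus its tco offset) and is assigned to the quad whose center is
    # nearest; label 0 is reserved for the background.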
    def estimate_sample_pts_num(self, quad, xy_text):
        """
        Estimate the number of sample points.
        """
        eh = (np.linalg.norm(quad[0] - quad[3]) +
              np.linalg.norm(quad[1] - quad[2])) / 2.0
        ew = (np.linalg.norm(quad[0] - quad[1]) +
              np.linalg.norm(quad[2] - quad[3])) / 2.0

        dense_sample_pts_num = max(2, int(ew))
        dense_xy_center_line = xy_text[np.linspace(
            0,
            xy_text.shape[0] - 1,
            dense_sample_pts_num,
            endpoint=True,
            dtype=np.float32).astype(np.int32)]

        dense_xy_center_line_diff = dense_xy_center_line[
            1:] - dense_xy_center_line[:-1]
        estimate_arc_len = np.sum(
            np.linalg.norm(
                dense_xy_center_line_diff, axis=1))

        sample_pts_num = max(2, int(estimate_arc_len / eh))
        return sample_pts_num
    def detect_sast(self,
                    tcl_map,
                    tvo_map,
                    tbo_map,
                    tco_map,
                    ratio_w,
                    ratio_h,
                    src_w,
                    src_h,
                    shrink_ratio_of_width=0.3,
                    tcl_map_thresh=0.5,
                    offset_expand=1.0,
                    out_strid=4.0):
        """
        Restore quads from tcl_map and tvo_map, cluster tcl pixels into text
        instances, then sample each center line and restore the polys at the
        original image scale.
        """
        # restore quad
        scores, quads, xy_text = self.restore_quad(tcl_map, tcl_map_thresh,
                                                   tvo_map)
        dets = np.hstack((quads, scores)).astype(np.float32, copy=False)
        dets = self.nms(dets)
        if dets.shape[0] == 0:
            return []
        quads = dets[:, :-1].reshape(-1, 4, 2)

        # Compute quad area
        quad_areas = []
        for quad in quads:
            quad_areas.append(-self.quad_area(quad))

        # instance segmentation
        # instance_count, instance_label_map = cv2.connectedComponents(tcl_map.astype(np.uint8), connectivity=8)
        instance_count, instance_label_map = self.cluster_by_quads_tco(
            tcl_map, tcl_map_thresh, quads, tco_map)

        # restore single poly with tcl instance.
        poly_list = []
        for instance_idx in range(1, instance_count):
            xy_text = np.argwhere(instance_label_map == instance_idx)[:, ::-1]
            quad = quads[instance_idx - 1]
            q_area = quad_areas[instance_idx - 1]
            if q_area < 5:
                continue

            # filter quads that are too narrow
            len1 = float(np.linalg.norm(quad[0] - quad[1]))
            len2 = float(np.linalg.norm(quad[1] - quad[2]))
            min_len = min(len1, len2)
            if min_len < 3:
                continue

            # filter small CC
            if xy_text.shape[0] <= 0:
                continue

            # filter low confidence instance
            xy_text_scores = tcl_map[xy_text[:, 1], xy_text[:, 0], 0]
            if np.sum(xy_text_scores) / quad_areas[instance_idx - 1] < 0.1:
                # if np.sum(xy_text_scores) / quad_areas[instance_idx - 1] < 0.05:
                continue

            # sort xy_text
            left_center_pt = np.array(
                [[(quad[0, 0] + quad[-1, 0]) / 2.0,
                  (quad[0, 1] + quad[-1, 1]) / 2.0]])  # (1, 2)
            right_center_pt = np.array(
                [[(quad[1, 0] + quad[2, 0]) / 2.0,
                  (quad[1, 1] + quad[2, 1]) / 2.0]])  # (1, 2)
            proj_unit_vec = (right_center_pt - left_center_pt) / \
                (np.linalg.norm(right_center_pt - left_center_pt) + 1e-6)
            proj_value = np.sum(xy_text * proj_unit_vec, axis=1)
            xy_text = xy_text[np.argsort(proj_value)]

            # Sample pts in tcl map
            if self.sample_pts_num == 0:
                sample_pts_num = self.estimate_sample_pts_num(quad, xy_text)
            else:
                sample_pts_num = self.sample_pts_num
            xy_center_line = xy_text[np.linspace(
                0,
                xy_text.shape[0] - 1,
                sample_pts_num,
                endpoint=True,
                dtype=np.float32).astype(np.int32)]

            point_pair_list = []
            for x, y in xy_center_line:
                # get corresponding offset
                offset = tbo_map[y, x, :].reshape(2, 2)
                if offset_expand != 1.0:
                    offset_length = np.linalg.norm(
                        offset, axis=1, keepdims=True)
                    expand_length = np.clip(
                        offset_length * (offset_expand - 1),
                        a_min=0.5,
                        a_max=3.0)
                    offset_detal = offset / offset_length * expand_length
                    offset = offset + offset_detal
                # original point
                ori_yx = np.array([y, x], dtype=np.float32)
                point_pair = (ori_yx + offset)[:, ::-1] * out_strid / np.array(
                    [ratio_w, ratio_h]).reshape(-1, 2)
                point_pair_list.append(point_pair)

            # ndarray: (x, 2), expand poly along width
            detected_poly = self.point_pair2poly(point_pair_list)
            detected_poly = self.expand_poly_along_width(detected_poly,
                                                         shrink_ratio_of_width)
            detected_poly[:, 0] = np.clip(
                detected_poly[:, 0], a_min=0, a_max=src_w)
            detected_poly[:, 1] = np.clip(
                detected_poly[:, 1], a_min=0, a_max=src_h)
            poly_list.append(detected_poly)

        return poly_list
    def __call__(self, outs_dict, shape_list):
        score_list = outs_dict['f_score']
        border_list = outs_dict['f_border']
        tvo_list = outs_dict['f_tvo']
        tco_list = outs_dict['f_tco']
        if isinstance(score_list, paddle.Tensor):
            score_list = score_list.numpy()
            border_list = border_list.numpy()
            tvo_list = tvo_list.numpy()
            tco_list = tco_list.numpy()

        img_num = len(shape_list)
        poly_lists = []
        for ino in range(img_num):
            p_score = score_list[ino].transpose((1, 2, 0))
            p_border = border_list[ino].transpose((1, 2, 0))
            p_tvo = tvo_list[ino].transpose((1, 2, 0))
            p_tco = tco_list[ino].transpose((1, 2, 0))
            src_h, src_w, ratio_h, ratio_w = shape_list[ino]

            poly_list = self.detect_sast(
                p_score,
                p_tvo,
                p_border,
                p_tco,
                ratio_w,
                ratio_h,
                src_w,
                src_h,
                shrink_ratio_of_width=self.shrink_ratio_of_width,
                tcl_map_thresh=self.tcl_map_thresh,
                offset_expand=self.expand_scale)
            poly_lists.append({'points': np.array(poly_list)})

        return poly_lists
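
# A minimal usage sketch, not part of the original module. The channel counts
# (1 for f_score, 4 for f_border, 8 for f_tvo, 2 for f_tco), the feature-map
# size and the shape_list values below are illustrative assumptions; because of
# the relative imports above, this is meant to be run in the package context
# (e.g. `python -m ppocr.postprocess.sast_postprocess`).
if __name__ == '__main__':
    h, w = 128, 128  # assumed feature-map size (input size / out_strid)
    outs_dict = {
        # Random maps: the scores are scaled below tcl_map_thresh, so no text
        # is detected; real model outputs would yield (n_points, 2) polygons.
        'f_score': (np.random.rand(1, 1, h, w) * 0.3).astype(np.float32),
        'f_border': np.random.rand(1, 4, h, w).astype(np.float32),
        'f_tvo': np.random.rand(1, 8, h, w).astype(np.float32),
        'f_tco': np.random.rand(1, 2, h, w).astype(np.float32),
    }
    # Each shape_list row is (src_h, src_w, ratio_h, ratio_w) for one image.
    shape_list = np.array([[720., 1280., 512. / 720, 512. / 1280]])
    post_process = SASTPostProcess(score_thresh=0.5, nms_thresh=0.2)
    results = post_process(outs_dict, shape_list)
    print(len(results), results[0]['points'].shape)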