@@ -0,0 +1,142 @@
+from paddle.static import InputSpec
+import paddle
+
+class Document(paddle.nn.Layer):
+    def __init__(self):
+        super(Document, self).__init__()
+        # Parameters of the final fully-connected layer; their values come from the loaded checkpoint.
+        self.x2paddle_fc_weight = self.create_parameter(shape=[8, 64], attr='x2paddle_fc_weight', dtype='float32', default_initializer=paddle.nn.initializer.Constant(value=0.0))
+        self.x2paddle_fc_bias = self.create_parameter(shape=[8], attr='x2paddle_fc_bias', dtype='float32', default_initializer=paddle.nn.initializer.Constant(value=0.0))
+        # Stage 1: 16-channel stem and residual blocks.
+        self.conv0 = paddle.nn.Conv2D(in_channels=3, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu0 = paddle.nn.ReLU()
+        self.conv1 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu1 = paddle.nn.ReLU()
+        self.conv2 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu2 = paddle.nn.ReLU()
+        self.conv3 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu3 = paddle.nn.ReLU()
+        self.conv4 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu4 = paddle.nn.ReLU()
+        self.conv5 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu5 = paddle.nn.ReLU()
+        self.conv6 = paddle.nn.Conv2D(in_channels=16, out_channels=16, kernel_size=[3, 3], padding=1)
+        self.relu6 = paddle.nn.ReLU()
+        # Stage 2: stride-2 transition to 32 channels; pad0/pool0 downsample the identity shortcut.
+        self.conv7 = paddle.nn.Conv2D(in_channels=16, out_channels=32, kernel_size=[3, 3], stride=2, padding=1)
+        self.pad0 = paddle.nn.Pad2D(mode='constant', padding=[0, 0, 0, 0, 0, 0, 0, 0])
+        self.relu7 = paddle.nn.ReLU()
+        self.pool0 = paddle.nn.AvgPool2D(kernel_size=[1, 1], stride=2)
+        self.conv8 = paddle.nn.Conv2D(in_channels=32, out_channels=32, kernel_size=[3, 3], padding=1)
+        self.relu8 = paddle.nn.ReLU()
+        self.conv9 = paddle.nn.Conv2D(in_channels=32, out_channels=32, kernel_size=[3, 3], padding=1)
+        self.relu9 = paddle.nn.ReLU()
+        self.conv10 = paddle.nn.Conv2D(in_channels=32, out_channels=32, kernel_size=[3, 3], padding=1)
+        self.relu10 = paddle.nn.ReLU()
+        self.conv11 = paddle.nn.Conv2D(in_channels=32, out_channels=32, kernel_size=[3, 3], padding=1)
+        self.relu11 = paddle.nn.ReLU()
+        self.conv12 = paddle.nn.Conv2D(in_channels=32, out_channels=32, kernel_size=[3, 3], padding=1)
+        self.relu12 = paddle.nn.ReLU()
+        # Stage 3: stride-2 transition to 64 channels; pad1/pool1 downsample the identity shortcut.
+        self.conv13 = paddle.nn.Conv2D(in_channels=32, out_channels=64, kernel_size=[3, 3], stride=2, padding=1)
+        self.pad1 = paddle.nn.Pad2D(mode='constant', padding=[0, 0, 0, 0, 0, 0, 0, 0])
+        self.relu13 = paddle.nn.ReLU()
+        self.pool1 = paddle.nn.AvgPool2D(kernel_size=[1, 1], stride=2)
+        self.conv14 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=[3, 3], padding=1)
+        self.relu14 = paddle.nn.ReLU()
+        self.conv15 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=[3, 3], padding=1)
+        self.relu15 = paddle.nn.ReLU()
+        self.conv16 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=[3, 3], padding=1)
+        self.relu16 = paddle.nn.ReLU()
+        self.conv17 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=[3, 3], padding=1)
+        self.relu17 = paddle.nn.ReLU()
+        self.conv18 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=[3, 3], padding=1)
+        self.relu18 = paddle.nn.ReLU()
+        # 8x8 average pooling ahead of the classifier.
+        self.pad2 = paddle.nn.Pad2D(mode='constant', padding=[0, 0, 0, 0, 0, 0, 0, 0])
+        self.pool2 = paddle.nn.AvgPool2D(kernel_size=[8, 8], stride=8)
+
+    def forward(self, x2paddle_input):
+        x2paddle_fc_weight = self.x2paddle_fc_weight
+        x2paddle_fc_bias = self.x2paddle_fc_bias
+        # Zero constants used to fill the extra shortcut channels at the two stride-2 transitions.
+        x2paddle_150 = paddle.full(dtype='float32', shape=[1], fill_value=0.0)
+        x2paddle_176 = paddle.full(dtype='float32', shape=[1], fill_value=0.0)
+        x2paddle_204 = self.conv0(x2paddle_input)
+        x2paddle_121 = self.relu0(x2paddle_204)
+        x2paddle_207 = self.conv1(x2paddle_121)
+        x2paddle_124 = self.relu1(x2paddle_207)
+        x2paddle_210 = self.conv2(x2paddle_124)
+        x2paddle_127 = paddle.add(x=x2paddle_121, y=x2paddle_210)
+        x2paddle_128 = self.relu2(x2paddle_127)
+        x2paddle_213 = self.conv3(x2paddle_128)
+        x2paddle_131 = self.relu3(x2paddle_213)
+        x2paddle_216 = self.conv4(x2paddle_131)
+        x2paddle_134 = paddle.add(x=x2paddle_128, y=x2paddle_216)
+        x2paddle_135 = self.relu4(x2paddle_134)
+        x2paddle_219 = self.conv5(x2paddle_135)
+        x2paddle_138 = self.relu5(x2paddle_219)
+        x2paddle_222 = self.conv6(x2paddle_138)
+        x2paddle_141 = paddle.add(x=x2paddle_135, y=x2paddle_222)
+        x2paddle_142 = self.relu6(x2paddle_141)
+        x2paddle_225 = self.conv7(x2paddle_142)
+        x2paddle_148 = self.pad0(x2paddle_142)
+        x2paddle_145 = self.relu7(x2paddle_225)
+        x2paddle_149 = self.pool0(x2paddle_148)
+        x2paddle_228 = self.conv8(x2paddle_145)
+        # Zero-pad the pooled 16-channel shortcut to 32 channels before adding it to the residual branch.
+        x2paddle_151 = paddle.multiply(x=x2paddle_149, y=x2paddle_150)
+        x2paddle_152 = paddle.concat(x=[x2paddle_149, x2paddle_151], axis=1)
+        x2paddle_153 = paddle.add(x=x2paddle_152, y=x2paddle_228)
+        x2paddle_154 = self.relu8(x2paddle_153)
+        x2paddle_231 = self.conv9(x2paddle_154)
+        x2paddle_157 = self.relu9(x2paddle_231)
+        x2paddle_234 = self.conv10(x2paddle_157)
+        x2paddle_160 = paddle.add(x=x2paddle_154, y=x2paddle_234)
+        x2paddle_161 = self.relu10(x2paddle_160)
+        x2paddle_237 = self.conv11(x2paddle_161)
+        x2paddle_164 = self.relu11(x2paddle_237)
+        x2paddle_240 = self.conv12(x2paddle_164)
+        x2paddle_167 = paddle.add(x=x2paddle_161, y=x2paddle_240)
+        x2paddle_168 = self.relu12(x2paddle_167)
+        x2paddle_243 = self.conv13(x2paddle_168)
+        x2paddle_174 = self.pad1(x2paddle_168)
+        x2paddle_171 = self.relu13(x2paddle_243)
+        x2paddle_175 = self.pool1(x2paddle_174)
+        x2paddle_246 = self.conv14(x2paddle_171)
+        # Zero-pad the pooled 32-channel shortcut to 64 channels before adding it to the residual branch.
+        x2paddle_177 = paddle.multiply(x=x2paddle_175, y=x2paddle_176)
+        x2paddle_178 = paddle.concat(x=[x2paddle_175, x2paddle_177], axis=1)
+        x2paddle_179 = paddle.add(x=x2paddle_178, y=x2paddle_246)
+        x2paddle_180 = self.relu14(x2paddle_179)
+        x2paddle_249 = self.conv15(x2paddle_180)
+        x2paddle_183 = self.relu15(x2paddle_249)
+        x2paddle_252 = self.conv16(x2paddle_183)
+        x2paddle_186 = paddle.add(x=x2paddle_180, y=x2paddle_252)
+        x2paddle_187 = self.relu16(x2paddle_186)
+        x2paddle_255 = self.conv17(x2paddle_187)
+        x2paddle_190 = self.relu17(x2paddle_255)
+        x2paddle_258 = self.conv18(x2paddle_190)
+        x2paddle_193 = paddle.add(x=x2paddle_187, y=x2paddle_258)
+        x2paddle_194 = self.relu18(x2paddle_193)
+        x2paddle_195 = self.pad2(x2paddle_194)
+        x2paddle_196 = self.pool2(x2paddle_195)
+        # Flatten the pooled features and apply the fully-connected classifier head.
+        x2paddle_202 = paddle.reshape(x=x2paddle_196, shape=[1, -1])
+        x2paddle_output_mm = paddle.matmul(x=x2paddle_202, y=x2paddle_fc_weight, transpose_y=True)
+        x2paddle_output_mm = paddle.scale(x=x2paddle_output_mm)  # identity scale (default factor 1.0)
+        x2paddle_output = paddle.add(x=x2paddle_output_mm, y=x2paddle_fc_bias)
+        return x2paddle_output
+
+# Load the converted weights and export a static-graph inference model.
+paddle.disable_static()
+params = paddle.load(r'doc_scan/doc_scan_pd_model/model.pdparams')
+model = Document()
+model.set_dict(params, use_structured_name=True)
+input_spec = InputSpec([1, 3, 32, 32], 'float32', 'x')
+model.eval()
+paddle.jit.save(
+    layer=model,
+    path='document_infer_model/inference_model',
+    input_spec=[input_spec])
+print('document inference model saved in ./document_infer_model')