test_paddle2onnx.sh

#!/bin/bash
source test_tipc/utils_func.sh

FILENAME=$1
MODE="paddle2onnx_infer"
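
# Usage (a rough sketch, assuming the script lives under test_tipc/ as in TIPC
# suites; the config path is a placeholder, not a real file):
#   bash test_tipc/test_paddle2onnx.sh <path/to/paddle2onnx_config.txt>
#
# Each non-header line of the config is a "key:value" pair that
# func_parser_key / func_parser_value split apart, e.g. a hypothetical entry:
#   --opset_version:11
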
# parse model_name
dataline=$(cat ${FILENAME})
IFS=$'\n'
lines=(${dataline})
model_name=$(func_parser_value "${lines[1]}")
echo "ppdet onnx_infer: ${model_name}"
python=$(func_parser_value "${lines[2]}")
filename_key=$(func_parser_key "${lines[3]}")
filename_value=$(func_parser_value "${lines[3]}")

# export params
save_export_key=$(func_parser_key "${lines[5]}")
save_export_value=$(func_parser_value "${lines[5]}")
export_weight_key=$(func_parser_key "${lines[6]}")
export_weight_value=$(func_parser_value "${lines[6]}")
norm_export=$(func_parser_value "${lines[7]}")
pact_export=$(func_parser_value "${lines[8]}")
fpgm_export=$(func_parser_value "${lines[9]}")
distill_export=$(func_parser_value "${lines[10]}")
export_key1=$(func_parser_key "${lines[11]}")
export_value1=$(func_parser_value "${lines[11]}")
export_param_key=$(func_parser_key "${lines[12]}")
export_param_value=$(func_parser_value "${lines[12]}")
kl_quant_export=$(func_parser_value "${lines[13]}")
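
# norm_export / pact_export / fpgm_export / distill_export / kl_quant_export hold
# the export commands that the infer_mode switch in the main loop below selects.
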
# parse paddle2onnx params
infer_mode_list=$(func_parser_value "${lines[15]}")
infer_is_quant_list=$(func_parser_value "${lines[16]}")
paddle2onnx_cmd=$(func_parser_value "${lines[17]}")
model_dir_key=$(func_parser_key "${lines[18]}")
model_filename_key=$(func_parser_key "${lines[19]}")
model_filename_value=$(func_parser_value "${lines[19]}")
params_filename_key=$(func_parser_key "${lines[20]}")
params_filename_value=$(func_parser_value "${lines[20]}")
save_file_key=$(func_parser_key "${lines[21]}")
save_file_value=$(func_parser_value "${lines[21]}")
opset_version_key=$(func_parser_key "${lines[22]}")
opset_version_value=$(func_parser_value "${lines[22]}")
enable_onnx_checker_key=$(func_parser_key "${lines[23]}")
enable_onnx_checker_value=$(func_parser_value "${lines[23]}")
paddle2onnx_params1_key=$(func_parser_key "${lines[24]}")
paddle2onnx_params1_value=$(func_parser_value "${lines[24]}")
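
# For reference, the conversion command assembled in func_paddle2onnx_inference
# usually expands to something along these lines. This is a hypothetical example:
# the tool name and every flag value come from the config file, not from this script.
#   paddle2onnx --model_dir ./output_inference/<model_name> \
#       --model_filename model.pdmodel --params_filename model.pdiparams \
#       --save_file ./output_inference/<model_name>/model.onnx \
#       --opset_version 11 --enable_onnx_checker True
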
# parse onnx inference params
inference_py=$(func_parser_value "${lines[25]}")
infer_cfg_key=$(func_parser_key "${lines[26]}")
onnx_file_key=$(func_parser_key "${lines[27]}")
infer_image_key=$(func_parser_key "${lines[28]}")
infer_image_value=$(func_parser_value "${lines[28]}")
infer_param1_key=$(func_parser_key "${lines[29]}")
infer_param1_value=$(func_parser_value "${lines[29]}")

LOG_PATH="./test_tipc/output/${model_name}/${MODE}"
mkdir -p ${LOG_PATH}
status_log="${LOG_PATH}/results_paddle2onnx.log"
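
# func_paddle2onnx_inference <python> <log_path> <export_model_dir>
#   Converts the exported Paddle inference model in <export_model_dir> to ONNX,
#   then runs the ONNX inference script on the converted model. Both steps write
#   their output to <log_path> and report exit status through status_check.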
function func_paddle2onnx_inference(){
    IFS='|'
    _python=$1
    _log_path=$2
    _export_model_dir=$3

    # paddle2onnx
    echo "################### run paddle2onnx ###################"
    set_dirname=$(func_set_params "${model_dir_key}" "${_export_model_dir}")
    set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
    set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
    set_save_model=$(func_set_params "${save_file_key}" "${_export_model_dir}/${save_file_value}")
    set_opset_version=$(func_set_params "${opset_version_key}" "${opset_version_value}")
    set_enable_onnx_checker=$(func_set_params "${enable_onnx_checker_key}" "${enable_onnx_checker_value}")
    set_paddle2onnx_params1=$(func_set_params "${paddle2onnx_params1_key}" "${paddle2onnx_params1_value}")
    trans_log_path="${_log_path}/trans_model.log"
    trans_model_cmd="${paddle2onnx_cmd} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_save_model} ${set_opset_version} ${set_enable_onnx_checker} ${set_paddle2onnx_params1}"
    eval "${trans_model_cmd} > ${trans_log_path} 2>&1"
    last_status=${PIPESTATUS[0]}
    cat ${trans_log_path}
    status_check $last_status "${trans_model_cmd}" "${status_log}" "${model_name}" "${trans_log_path}"

    # python inference
    echo "################### run onnx infer ###################"
    set_infer_cfg=$(func_set_params "${infer_cfg_key}" "${_export_model_dir}/infer_cfg.yml")
    set_onnx_file=$(func_set_params "${onnx_file_key}" "${_export_model_dir}/${save_file_value}")
    set_infer_image_file=$(func_set_params "${infer_image_key}" "${infer_image_value}")
    set_infer_param1=$(func_set_params "${infer_param1_key}" "${infer_param1_value}")
    _save_log_path="${_log_path}/paddle2onnx_infer_cpu.log"
    infer_model_cmd="${python} ${inference_py} ${set_infer_cfg} ${set_onnx_file} ${set_infer_image_file} ${set_infer_param1}"
    eval "${infer_model_cmd} > ${_save_log_path} 2>&1"
    last_status=${PIPESTATUS[0]}
    cat ${_save_log_path}
    status_check $last_status "${infer_model_cmd}" "${status_log}" "${model_name}" "${_save_log_path}"
}
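
# For each requested infer_mode: re-export the model with the matching export
# command (skipped when infer_mode is "null"), then convert the exported model
# to ONNX and run ONNX inference on it.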
export Count=0
IFS="|"
for infer_mode in ${infer_mode_list[*]}; do
    if [ "${infer_mode}" != "null" ]; then
        # run export
        case ${infer_mode} in
            norm) run_export=${norm_export} ;;
            quant) run_export=${pact_export} ;;
            fpgm) run_export=${fpgm_export} ;;
            distill) run_export=${distill_export} ;;
            kl_quant) run_export=${kl_quant_export} ;;
            *) echo "Undefined infer_mode!"; exit 1 ;;
        esac
        set_export_weight=$(func_set_params "${export_weight_key}" "${export_weight_value}")
        set_save_export_dir=$(func_set_params "${save_export_key}" "${save_export_value}")
        set_filename=$(func_set_params "${filename_key}" "${model_name}")
        set_export_param=$(func_set_params "${export_param_key}" "${export_param_value}")
        export_log_path="${LOG_PATH}/export.log"
        export_cmd="${python} ${run_export} ${set_export_weight} ${set_filename} ${set_export_param} ${set_save_export_dir}"
        echo $export_cmd
        eval "${export_cmd} > ${export_log_path} 2>&1"
        status_export=$?
        cat ${export_log_path}
        status_check $status_export "${export_cmd}" "${status_log}" "${model_name}" "${export_log_path}"
    fi

    # run inference
    export_model_dir="${save_export_value}/${model_name}"
    func_paddle2onnx_inference "${python}" "${LOG_PATH}" "${export_model_dir}"
    Count=$(($Count + 1))
done