# test_pipeline_infer_python.sh
  1. #!/bin/bash
  2. source test_tipc/utils_func.sh
  3. FILENAME=$1
  4. MODE="pipeline_infer"
  5. # parser model_name
  6. dataline=$(cat ${FILENAME})
  7. IFS=$'\n'
  8. lines=(${dataline})
  9. model_name=$(func_parser_value "${lines[1]}")
  10. echo "ppdet pipeline_python_infer: ${model_name}"
  11. python=$(func_parser_value "${lines[2]}")
  12. filename_key=$(func_parser_key "${lines[3]}")
  13. filename_value=$(func_parser_value "${lines[3]}")
  14. # parser infer params
  15. infer_mode_list=$(func_parser_value "${lines[5]}")
  16. input_key=$(func_parser_key "${lines[6]}")
  17. input_list=$(func_parser_value "${lines[6]}")
  18. use_gpu=$(func_parser_value "${lines[7]}")
  19. inference_py=$(func_parser_value "${lines[8]}")
  20. use_device_key=$(func_parser_key "${lines[9]}")
  21. use_device_list=$(func_parser_value "${lines[9]}")
  22. image_dir_key=$(func_parser_key "${lines[10]}")
  23. infer_img_dir=$(func_parser_value "${lines[10]}")
  24. video_dir_key=$(func_parser_key "${lines[11]}")
  25. infer_video_dir=$(func_parser_value "${lines[11]}")
  26. LOG_PATH="./test_tipc/output/${model_name}/${MODE}"
  27. mkdir -p ${LOG_PATH}
  28. status_log="${LOG_PATH}/results_serving_python.log"
  29. function func_pipeline_inference(){
  30. IFS='|'
  31. _python=$1
  32. _log_path=$2
  33. _pipeline_script=$3
  34. _infer_dir=$4
  35. _input_type=$5
  36. _device_cmd=$6
  37. _device_type=$7
  38. # inference
  39. pipeline_log_path="${_log_path}/python_pipeline_${_input_type}_${_device_type}.log"
  40. output_path="--output_dir=${LOG_PATH}/"
  41. mot_flag="-o MOT.enable=True"
  42. if [ ${_input_type} = "video" ]; then
  43. pipeline_cmd="${_python} ${_pipeline_script} ${_infer_dir} ${_device_cmd} ${output_path} ${mot_flag} > ${pipeline_log_path} 2>&1 &"
  44. else
  45. pipeline_cmd="${_python} ${_pipeline_script} ${_infer_dir} ${_device_cmd} ${output_path} > ${pipeline_log_path} 2>&1 &"
  46. fi
  47. # run
  48. eval $pipeline_cmd
  49. last_status=${PIPESTATUS[0]}
  50. eval "cat ${pipeline_log_path}"
  51. status_check $last_status "${pipeline_cmd}" "${status_log}" "${model_name}" "${pipeline_log_path}"
  52. }
  53. #run infer
  54. Count=0
  55. IFS="|"
  56. for input in ${input_list[*]}; do
  57. for device_type in ${use_device_list[*]};do
  58. # set cuda device
  59. if [ ${use_gpu} = "False" ] || [ ${device_type} = "cpu" ]; then
  60. device_cmd=$(func_set_params "${use_device_key}" "${device_type}")
  61. elif [ ${use_gpu} = "True" ] && [ ${device_type} = "gpu" ]; then
  62. device_cmd=$(func_set_params "${use_device_key}" "${device_type}")
  63. env="export CUDA_VISIBLE_DEVICES=0"
  64. eval $env
  65. else
  66. echo "Does not support hardware other than CPU and GPU Currently!"
  67. fi
  68. if [ ${input} != "null" ]; then
  69. case ${input} in
  70. image) set_infer_file=$(func_set_params "${image_dir_key}" "${infer_img_dir}") ;;
  71. video) set_infer_file=$(func_set_params "${video_dir_key}" "${infer_video_dir}") ;;
  72. *) echo "Undefined input mode!"; exit 1;
  73. esac
  74. fi
  75. #run inference
  76. func_pipeline_inference "${python}" "${LOG_PATH}" "${inference_py}" ${set_infer_file} ${input} ${device_cmd} ${device_type}
  77. Count=$(($Count + 1))
  78. eval "unset CUDA_VISIBLE_DEVICES"
  79. done
  80. done