Practical Implementation: Splitting and Detecting CPP Code Deployment in a Single File

1. Preface

Recently, while deploying an AI vision algorithm on the X3Pi, I found that the platform initially provides mostly Python examples; documentation for deploying C++ code is scarce, and the X3Pi C++ deployment docs contain only a single image-classification example. I believe many forum members have run into similar problems. After a period of testing, combined with some open-source code, I put together a simple C++ deployment workflow. The code is not yet packaged and is provided for reference only.

Code:https://github.com/shouxieai/tensorRT_Pro Code:https://github.com/Rex-LK/ai_arm_learning

Thanks to the authors above for open-sourcing their code.

2. C++ Deployment Code

2.1. Structure of the code

    1. Load the model
    2. Allocate memory
    3. Run inference
    4. Post-processing
    5. Release memory

The only difference between segmentation and detection lies in the post-processing step; all other stages are identical. Starting from the classification example, adding segmentation- and detection-specific post-processing code is enough to support both tasks. Part of the post-processing code is shown below.

    // Paths to the compiled model and the test image; adjust to your layout.
    // (Original used typographic quotes “…”, which do not compile.)
    string model_path = "../FastestDet_nv12.bin";
    auto modelFileName = model_path.c_str();
    string image_path = "../det.png";
    // Reuse image_path instead of repeating the literal.
    Mat image0 = imread(image_path);
    Mat image = image0.clone();
    if (detect_type == "segment") {
        // Output tensor assumed [1, H, W, num_classes] — TODO confirm against model
        int *shape = output->properties.validShape.dimensionSize;
        int out_h = shape[1];
        int out_w = shape[2];
        int out_nclass = shape[3];

        Mat unet_prob, iclass;

        // Match the image to the network's output resolution before rendering.
        resize(image, image, Size(out_w, out_h));

        tie(unet_prob, iclass) = post_process(data, out_w, out_h, out_nclass, 0);
        imwrite("unet_prob.jpg", unet_prob);
        // Messages now name the files actually written (original said image-prob.jpg).
        cout << "Done, Save to unet_prob.jpg" << endl;
        imwrite("unet_mask.jpg", iclass);
        cout << "Done, Save to unet_mask.jpg" << endl;

        // Overlay the class mask on the image and restore the original size.
        render(image, unet_prob, iclass);
        resize(image, image, Size(image_w, image_h));
        imwrite("unet-rgb.jpg", image);
        cout << "Done, Save to unet-rgb.jpg" << endl;
    }
    else if (detect_type == "detect") {
        // YOLO-style output: 25200 candidates x (4 bbox + 1 objectness + 80 classes).
        int cols = 85;
        int num_classes = cols - 5;
        int rows = 25200;
        float confidence_threshold = 0.4;
        float nms_threshold = 0.6;
        // `auto` deduces the box container type; the original bare `vector`
        // (no template arguments) was invalid C++.
        auto BBoxes = decodeBbox(data, rows, cols, confidence_threshold, num_classes);
        auto resBBoxes = nms(BBoxes, nms_threshold);

        Mat drawMat = image0.clone();
        Mat show_img;
        resize(image0, show_img, Size(640, 640));

        // Draw each surviving box with its confidence score.
        for (auto &box : resBBoxes) {
            rectangle(show_img, Point(box.left, box.top),
                      Point(box.right, box.bottom), Scalar(0, 255, 0), 2);
            putText(show_img, format("%.2f", box.confidence),
                    Point(box.left, box.top - 10), 0, 0.8,
                    Scalar(0, 0, 255), 2, 2);
        }
        imwrite("det.jpg", show_img);
        cout << "Done, Save to det.jpg" << endl;
    }

2.2. CMakeLists.txt and Dependencies

The dependency files needed to compile this project are available inside docker at /root/.horizon/ddk/xj3_aarch64 and under horizon_xj3_open_explorer_v2.3.3_20220727/ddk/samples/ai_toolchain/horizon_runtime_sample/code/deps_gcc9.3; they can also be downloaded from the Baidu cloud link above. The project supports two modes — compiling and running inside docker, or compiling in docker and running on the X3 board — selected by commenting or uncommenting the SET(tar x3) line.

# Build script for the X3 demo.
#   - Leave `set(tar x3)` in place to cross-compile for the X3 board (aarch64).
#   - Comment it out to build an x86 binary that runs inside the docker image.
# Minimum raised from 2.8 (deprecated in current CMake); 3.13 is needed for
# target_link_directories.
cmake_minimum_required(VERSION 3.13)

set(tar x3)

# The toolchain must be chosen before project() runs its compiler checks;
# the original set CMAKE_CXX_COMPILER after project(), which is unreliable.
if(tar)
    set(CMAKE_C_COMPILER /opt/gcc-ubuntu-9.3.0-2020.03-x86_64-aarch64-linux-gnu/bin/aarch64-linux-gnu-gcc)
    set(CMAKE_CXX_COMPILER /opt/gcc-ubuntu-9.3.0-2020.03-x86_64-aarch64-linux-gnu/bin/aarch64-linux-gnu-g++)
endif()

project(test)

# Select the dependency tree matching the target architecture.
if(tar)
    message(STATUS "build arm")
    set(DEPS_DIR ${CMAKE_CURRENT_SOURCE_DIR}/deps/aarch64/)
else()
    message(STATUS "build x86")
    set(DEPS_DIR ${CMAKE_CURRENT_SOURCE_DIR}/deps/x86/)
endif()

# Portable replacement for appending "-std=c++11" to CMAKE_CXX_FLAGS.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

add_executable(run_x3 src/run_x3.cc)
# Suppress all warnings, as the original did globally with add_definitions(-w).
target_compile_options(run_x3 PRIVATE -w)

if(tar)
    target_include_directories(run_x3 PRIVATE
        ${CMAKE_CURRENT_SOURCE_DIR}/include
        ${DEPS_DIR}/dnn/include
        ${DEPS_DIR}/glog/include
        ${DEPS_DIR}/gflags/include
        ${DEPS_DIR}/opencv/include)
    target_link_directories(run_x3 PRIVATE
        ${DEPS_DIR}/dnn/lib
        ${DEPS_DIR}/appsdk/appuser/lib
        ${DEPS_DIR}/appsdk/appuser/lib/hbbpu
        ${DEPS_DIR}/glog/lib
        ${DEPS_DIR}/gflags/lib
        ${DEPS_DIR}/opencv/lib)
    # Dropped the duplicate trailing `dnn` and the undefined ${LIB_DIR_OPENCV}
    # include/link directories from the original.
    target_link_libraries(run_x3 PRIVATE
        dnn gflags glog opencv_world zlib dl rt pthread)
else()
    target_include_directories(run_x3 PRIVATE
        ${CMAKE_CURRENT_SOURCE_DIR}/include
        ${DEPS_DIR}/dnn_x86/include/dnn
        ${DEPS_DIR}/glog/include
        ${DEPS_DIR}/gflags/include
        ${DEPS_DIR}/opencv/include)
    target_link_directories(run_x3 PRIVATE
        ${DEPS_DIR}/dnn_x86/lib
        ${DEPS_DIR}/glog/lib
        ${DEPS_DIR}/gflags/lib
        ${DEPS_DIR}/opencv/lib)
    target_link_libraries(run_x3 PRIVATE
        dnn hbdk_sim_x86 gflags glog opencv_world zlib dl rt pthread)
endif()

3. Run

cd x3_demo
mkdir build && cd build
cmake ..
make -j
./run_x3

4. Summary

After a series of tests, C++ code can finally be deployed to the board smoothly. Once you are familiar with this project, subsequent development should become much simpler. This article only sketches the C++ deployment process; the code leaves plenty of room for improvement, and I hope to find time to test other projects on the X3 in the future.