From 192b431630219fcb49b3ae092015cc1e4d28bc52 Mon Sep 17 00:00:00 2001
From: Jolin Zhang46
Date: Tue, 15 Sep 2020 14:41:31 +0800
Subject: [PATCH] update lite quick start

---
 .../source_en/quick_start/quick_start.md    | 133 ++++++++---------
 .../source_zh_cn/quick_start/quick_start.md | 136 +++++++++---------
 2 files changed, 127 insertions(+), 142 deletions(-)

diff --git a/lite/tutorials/source_en/quick_start/quick_start.md b/lite/tutorials/source_en/quick_start/quick_start.md
index b0712f03d6..8ca14849e0 100644
--- a/lite/tutorials/source_en/quick_start/quick_start.md
+++ b/lite/tutorials/source_en/quick_start/quick_start.md
@@ -54,9 +54,9 @@ The following section describes how to build and execute an on-device image clas
 
 - Android Studio 3.2 or later (version 4.0 or later is recommended)
 - Native development kit (NDK) 21.3
-- CMake 3.10.2
+- [CMake](https://cmake.org/download) 3.10.2
 - Android software development kit (SDK) 26 or later
-- OpenCV 4.0.0 or later (included in the sample code)
+- [JDK](https://www.oracle.com/downloads/otn-pub/java/JDK/) 1.8 or later
 
 ### Building and Running
 
@@ -80,6 +80,8 @@ The following section describes how to build and execute an on-device image clas
 
     For details about how to connect the Android Studio to a device for debugging, see .
 
+    "USB debugging mode" must be turned on before Android Studio can recognize the phone. On Huawei phones, it is generally enabled under Settings > System & updates > Developer options > USB debugging.
+
 3. Continue the installation on the Android device. After the installation is complete, you can view the content captured by a camera and the inference result.
 
     ![result](../images/lite_quick_start_app_result.png)
 
@@ -95,23 +97,14 @@ This image classification sample program on the Android device includes a Java l
 
 ```
 app
-|
-├── libs # library files that store MindSpore Lite dependencies
-│   └── arm64-v8a
-│       ├── libopencv_java4.so
-│       └── libmindspore-lite.so
-│
-├── opencv # dependency files related to OpenCV
-│   └── ...
-|
 ├── src/main
 │   ├── assets # resource files
-|   |   └── model.ms # model file
+|   |   └── mobilenetv2.ms # model file
 │   |
 │   ├── cpp # main logic encapsulation classes for model loading and prediction
-|   |   ├── include # header files related to MindSpore calling
-|   |   |   └── ...
-│   |   |
+|   |   ├── ...
+|   |   ├── mindspore_lite_x.x.x-minddata-arm64-cpu # MindSpore Lite package
 |   |   ├── MindSporeNetnative.cpp # JNI methods related to MindSpore calling
 │   |   └── MindSporeNetnative.h # header file
 │   |
@@ -119,7 +112,7 @@ app
 │   │   └── com.huawei.himindsporedemo
 │   │       ├── gallery.classify # implementation related to image processing and MindSpore JNI calling
 │   │       │   └── ...
-│   │       └── obejctdetect # implementation related to camera enabling and drawing
+│   │       └── widget # implementation related to camera enabling and drawing
 │   │           └── ...
 │   │
 │   ├── res # resource files related to Android
 │   └── ...
 │
@@ -128,6 +121,7 @@ app
 ├── CMakeList.txt # CMake compilation entry file
 │
 ├── build.gradle # other Android configuration file
+├── download.gradle # downloads the MindSpore Lite package and the model file
 └── ...
 ```
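The inference logic in `MindSporeNetnative.cpp` is reached from the Java layer through JNI. As a rough sketch of that boundary (the package, class, and method names below are hypothetical placeholders rather than the demo's actual signatures), a native method that receives the model buffer from Java could look like this:

```cpp
#include <jni.h>

// Hypothetical JNI entry point; the real names live in MindSporeNetnative.cpp
// and in the Java code under gallery.classify.
extern "C" JNIEXPORT jlong JNICALL
Java_com_huawei_himindsporedemo_gallery_classify_TrackingMobile_loadModel(
    JNIEnv *env, jobject thiz, jobject model_buffer, jint num_thread) {
  // The Java side passes the .ms file as a direct ByteBuffer.
  char *buf = static_cast<char *>(env->GetDirectBufferAddress(model_buffer));
  jlong size = env->GetDirectBufferCapacity(model_buffer);
  // buf/size would then be handed to the MindSpore Lite session-creation code.
  (void)buf;
  (void)size;
  (void)num_thread;
  return 0;  // would return a handle to the created session wrapper
}
```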
@@ -156,42 +150,40 @@ android{
 Create a link to the `.so` library files in the `app/CMakeLists.txt` file:
 
 ```
-# Set MindSpore Lite Dependencies.
-include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/MindSpore)
+# ============== Set MindSpore Dependencies. =============
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
+
 add_library(mindspore-lite SHARED IMPORTED )
-set_target_properties(mindspore-lite PROPERTIES
-    IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libmindspore-lite.so")
+add_library(minddata-lite SHARED IMPORTED )
 
-# Set OpenCV Dependencies.
-include_directories(${CMAKE_SOURCE_DIR}/opencv/sdk/native/jni/include)
-add_library(lib-opencv SHARED IMPORTED )
-set_target_properties(lib-opencv PROPERTIES
-    IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libopencv_java4.so")
+set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
+        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
+set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
+        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
+# --------------- MindSpore Lite set End. --------------------
 
 # Link target library.
 target_link_libraries(
     ...
-    mindspore-lite
-    lib-opencv
+    # --- mindspore ---
+    minddata-lite
+    mindspore-lite
     ...
 )
 ```
 
 In this example, the build process uses download.gradle to automatically download the MindSpore Lite package and place it in the `app/src/main/cpp/mindspore_lite_x.x.x-minddata-arm64-cpu` directory.
 
 Note: If the automatic download fails, manually download the relevant library files and put them in the corresponding locations.
 
-libmindspore-lite.so [libmindspore-lite.so](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
-
-libmindspore-lite include [libmindspore-lite include](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/include.zip)
-
-libopencv_java4.so [libopencv_java4.so](https://download.mindspore.cn/model_zoo/official/lite/lib/opencv%204.4.0/libopencv_java4.so)
-
-libopencv include [libopencv include](https://download.mindspore.cn/model_zoo/official/lite/lib/opencv%204.4.0/include.zip)
-
-
+MindSpore Lite package [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
 
 ### Downloading and Deploying a Model File
 
@@ -201,8 +193,6 @@ Note: if the automatic download fails, please manually download the relevant lib
 
 mobilenetv2.ms [mobilenetv2.ms](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite/mobilenetv2.ms)
 
-
-
 ### Compiling On-Device Inference Code
 
 Call MindSpore Lite C++ APIs at the JNI layer to implement on-device inference.
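Before looking at the individual snippets, it helps to keep the overall MindSpore Lite call sequence in mind: import the model, configure a context, create a session, compile the graph, and only then run inference. The following is a minimal sketch of that sequence, assuming the MindSpore Lite 1.x C++ API and the header layout of the downloaded package; the helper name and error handling are illustrative, not the demo's actual code:

```cpp
#include <cstddef>
#include "include/context.h"
#include "include/errorcode.h"
#include "include/lite_session.h"
#include "include/model.h"

// buf/size: the contents of mobilenetv2.ms already read into memory.
mindspore::session::LiteSession *BuildSession(const char *buf, size_t size) {
  // Parse the flatbuffer model from the .ms buffer.
  auto *model = mindspore::lite::Model::Import(buf, size);
  if (model == nullptr) {
    return nullptr;
  }

  // CPU inference; thread_num_ mirrors context->thread_num_ in the demo code.
  mindspore::lite::Context context;
  context.thread_num_ = 2;

  // Create the session and compile the graph before running inference.
  auto *session = mindspore::session::LiteSession::CreateSession(&context);
  if (session == nullptr || session->CompileGraph(model) != mindspore::lite::RET_OK) {
    delete session;
    return nullptr;
  }
  return session;
}
```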
@@ -225,10 +215,8 @@ The inference code process is as follows. For details about the complete code, s
     *labelEnv = labelNet;
 
     // Create context.
-    lite::Context *context = new lite::Context;
-
-    context->device_ctx_.type = lite::DT_CPU;
-    context->thread_num_ = numThread;  // Specify the number of threads to run inference
+    mindspore::lite::Context *context = new mindspore::lite::Context;
+    context->thread_num_ = num_thread;
 
     // Create the mindspore session.
     labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
 
@@ -289,7 +277,12 @@ The inference code process is as follows. For details about the complete code, s
 
 - Obtain the output data.
 
   ```cpp
-  auto msOutputs = mSession->GetOutputs();
+  auto names = mSession->GetOutputTensorNames();
+  std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
+  for (const auto &name : names) {
+    auto temp_dat = mSession->GetOutputByTensorName(name);
+    msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
+  }
   std::string retStr = ProcessRunnetResult(msOutputs, ret);
   ```
 
   std::string ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs, int runnetRet) {
 
-    // Get model output results.
-    std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
-    iter = msOutputs.begin();
-    auto brach1_string = iter->first;
-    auto branch1_tensor = iter->second;
+    std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
+    iter = msOutputs.begin();
 
-    int OUTPUTS_LEN = branch1_tensor->ElementsNum();
+    // The mobilenetv2.ms model outputs just one branch.
+    auto outputTensor = iter->second;
+    int tensorNum = outputTensor->ElementsNum();
+    MS_PRINT("Number of tensor elements:%d", tensorNum);
 
-    float *temp_scores = static_cast<float *>(branch1_tensor->MutableData());
+    // Get a pointer to the first score.
+    float *temp_scores = static_cast<float *>(outputTensor->MutableData());
 
-    float scores[RET_CATEGORY_SUM];
-    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-        scores[i] = temp_scores[i];
-    }
+    float scores[RET_CATEGORY_SUM];
+    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
+        if (temp_scores[i] > 0.5) {
+            MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]);
+        }
+        scores[i] = temp_scores[i];
+    }
 
-    // Converted to text information that needs to be displayed in the APP.
-    std::string retStr = "";
-    if (runnetRet == 0) {
-        for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-            if (scores[i] > 0.3) {
-                retStr += g_labels_name_map[i];
-                retStr += ":";
-                std::string score_str = std::to_string(scores[i]);
-                retStr += score_str;
-                retStr += ";";
-            }
-        }
-    } else {
-        MS_PRINT("MindSpore run net failed!");
-        for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-            retStr += " :0.0;";
-        }
-    }
-    return retStr;
+    // Score for each category.
+    // Converted to the text information that needs to be displayed in the APP.
+    std::string categoryScore = "";
+    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
+        categoryScore += labels_name_map[i];
+        categoryScore += ":";
+        std::string score_str = std::to_string(scores[i]);
+        categoryScore += score_str;
+        categoryScore += ";";
+    }
+    return categoryScore;
   }
   ```
\ No newline at end of file
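Putting the output-handling code above together, the sequence at inference time is: run the graph, fetch the single output tensor by name, and reduce the score array to whatever the UI needs. Here is a compact sketch of that flow, assuming the MindSpore Lite 1.x C++ API; the function name and the returned string format are illustrative only:

```cpp
#include <algorithm>
#include <iterator>
#include <string>
#include "include/errorcode.h"
#include "include/lite_session.h"
#include "include/ms_tensor.h"

// session: a compiled LiteSession whose input tensor already holds the
// pre-processed image data.
std::string RunAndPickTop1(mindspore::session::LiteSession *session) {
  // Run inference.
  if (session->RunGraph() != mindspore::lite::RET_OK) {
    return "run graph failed";
  }

  // mobilenetv2.ms has a single output tensor; fetch it by name as above.
  auto names = session->GetOutputTensorNames();
  auto *tensor = session->GetOutputByTensorName(names.front());
  auto *scores = static_cast<float *>(tensor->MutableData());
  int num = tensor->ElementsNum();

  // The index of the highest score is the predicted category id.
  int best = static_cast<int>(std::distance(scores, std::max_element(scores, scores + num)));
  return std::to_string(best) + ":" + std::to_string(scores[best]);
}
```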
diff --git a/lite/tutorials/source_zh_cn/quick_start/quick_start.md b/lite/tutorials/source_zh_cn/quick_start/quick_start.md
index ef76d900d3..5d36ffe589 100644
--- a/lite/tutorials/source_zh_cn/quick_start/quick_start.md
+++ b/lite/tutorials/source_zh_cn/quick_start/quick_start.md
@@ -53,9 +53,9 @@ The image classification models in MindSpore Model Zoo can be [downloaded here](https://download.minds
 
 - Android Studio >= 3.2 (version 4.0 or later is recommended)
 - NDK 21.3
-- CMake 3.10.2
+- [CMake](https://cmake.org/download) 3.10.2
 - Android SDK >= 26
-- OpenCV >= 4.0.0 (included in the sample code)
+- [JDK](https://www.oracle.com/downloads/otn-pub/java/JDK/) >= 1.8
 
 ### Building and Running
 
@@ -79,10 +79,14 @@ The image classification models in MindSpore Model Zoo can be [downloaded here](https://download.minds
 
     For details about how to connect Android Studio to a device for debugging, see .
 
+    "USB debugging mode" must be enabled on the phone before Android Studio can recognize it. On Huawei phones, it is generally enabled under Settings > System & updates > Developer options > USB debugging.
+
 3. On the Android device, tap "Continue installation". After the installation is complete, you can view the content captured by the camera and the inference result.
 
     ![install](../images/lite_quick_start_install.png)
 
+
+    The recognition result is shown in the following figure.
+
     ![result](../images/lite_quick_start_app_result.png)
 
@@ -98,29 +102,22 @@ The image classification models in MindSpore Model Zoo can be [downloaded here](https://download.minds
 
 ```
 app
-|
-├── libs # library files that MindSpore Lite depends on
-│   └── arm64-v8a
-│       ├── libopencv_java4.so
-│       └── libmindspore-lite.so
-│
-├── opencv # OpenCV-related dependency files
-│   └── ...
-|
 ├── src/main
 │   ├── assets # resource files
-|   |   └── model.ms # model file
+|   |   └── mobilenetv2.ms # model file
 │   |
 │   ├── cpp # main logic encapsulation classes for model loading and prediction
 |   |   ├── ..
+|   |   ├── mindspore_lite_x.x.x-minddata-arm64-cpu # MindSpore Lite package
 |   |   ├── MindSporeNetnative.cpp # JNI methods related to MindSpore calling
 │   |   └── MindSporeNetnative.h # header file
+|   |   └── MsNetWork.cpp # MindSpore API wrapper
 │   |
 │   ├── java # Java-layer application code
 │   │   └── com.huawei.himindsporedemo
 │   │       ├── gallery.classify # implementation related to image processing and MindSpore JNI calling
 │   │       │   └── ...
-│   │       └── obejctdetect # implementation related to camera enabling and drawing
+│   │       └── widget # implementation related to camera enabling and drawing
 │   │           └── ...
 │   │
 │   ├── res # resource files related to Android
 │   └── ...
 │
 ├── CMakeList.txt # CMake compilation entry file
 │
 ├── build.gradle # other Android configuration file
+├── download.gradle # downloads the project dependency files
 └── ...
 ```
 
 ### Configuring MindSpore Lite Dependencies
 
-When MindSpore C++ APIs are called at the Android JNI layer, related library files are required. You can generate the `libmindspore-lite.so` library file by [building MindSpore Lite from source](https://www.mindspore.cn/lite/tutorial/zh-CN/master/build.html).
+When MindSpore C++ APIs are called at the Android JNI layer, related library files are required. You can generate the `libmindspore-lite.so` library file by building MindSpore Lite from source.
 
-In this example, the build process is configured in download.gradle to automatically download the `libmindspore-lite.so` and OpenCV `libopencv_java4.so` library files and place them in the `app/libs/arm64-v8a` directory.
+In this example, the build process uses download.gradle to automatically download the MindSpore Lite package from the Huawei server and place it in the `app/src/main/cpp/mindspore_lite_x.x.x-minddata-arm64-cpu` directory.
 
 Note: If the automatic download fails, manually download the relevant library files and put them in the corresponding locations:
 
-libmindspore-lite.so [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
-
-libmindspore-lite include files [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/include.zip)
-
-libopencv_java4.so [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/opencv%204.4.0/libopencv_java4.so)
-
-libopencv include files [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/opencv%204.4.0/include.zip)
-
-
+MindSpore Lite package [download link](https://download.mindspore.cn/model_zoo/official/lite/lib/mindspore%20version%200.7/libmindspore-lite.so)
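The downloaded package provides both the `.so` libraries that CMake links and the C++ headers that the JNI code compiles against. As a rough orientation (the exact header set is an assumption based on the 1.x package layout and the `include_directories` entries shown above), the includes used by `MindSporeNetnative.cpp` would look roughly like this:

```cpp
// Headers shipped in the mindspore_lite_x.x.x-minddata-arm64-cpu package;
// the paths resolve through the include_directories entries in CMakeLists.txt.
#include <jni.h>
#include "include/context.h"       // mindspore::lite::Context
#include "include/errorcode.h"     // mindspore::lite::RET_OK and other status codes
#include "include/lite_session.h"  // mindspore::session::LiteSession
#include "include/model.h"         // mindspore::lite::Model
#include "include/ms_tensor.h"     // mindspore::tensor::MSTensor
```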
 
 ```
 android{
@@ -169,23 +159,29 @@
 Create links to the `.so` library files in the `app/CMakeLists.txt` file, as shown below.
 
 ```
-# Set MindSpore Lite Dependencies.
-include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/include/MindSpore)
+# ============== Set MindSpore Dependencies. =============
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/third_party/flatbuffers/include)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
+
 add_library(mindspore-lite SHARED IMPORTED )
-set_target_properties(mindspore-lite PROPERTIES
-    IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libmindspore-lite.so")
+add_library(minddata-lite SHARED IMPORTED )
 
-# Set OpenCV Dependencies.
-include_directories(${CMAKE_SOURCE_DIR}/opencv/sdk/native/jni/include)
-add_library(lib-opencv SHARED IMPORTED )
-set_target_properties(lib-opencv PROPERTIES
-    IMPORTED_LOCATION "${CMAKE_SOURCE_DIR}/libs/libopencv_java4.so")
+set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
+        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
+set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
+        ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
+# --------------- MindSpore Lite set End. --------------------
 
 # Link target library.
 target_link_libraries(
     ...
-    mindspore-lite
-    lib-opencv
+    # --- mindspore ---
+    minddata-lite
+    mindspore-lite
     ...
 )
 ```
 
@@ -218,13 +214,12 @@
 
     *labelEnv = labelNet;
 
     // Create context.
-    lite::Context *context = new lite::Context;
-    context->device_ctx_.type = lite::DT_CPU;
-    context->thread_num_ = numThread;  // Specify the number of threads to run inference
+    mindspore::lite::Context *context = new mindspore::lite::Context;
+    context->thread_num_ = num_thread;
 
     // Create the mindspore session.
-    labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
-    delete(context);
+    labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
+    delete (context);
 
@@ -281,7 +276,12 @@
 
 - Obtain the output data.
 
   ```cpp
-  auto msOutputs = mSession->GetOutputs();
+  auto names = mSession->GetOutputTensorNames();
+  std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs;
+  for (const auto &name : names) {
+    auto temp_dat = mSession->GetOutputByTensorName(name);
+    msOutputs.insert(std::pair<std::string, mindspore::tensor::MSTensor *>{name, temp_dat});
+  }
   std::string retStr = ProcessRunnetResult(msOutputs, ret);
   ```
 
   std::string ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs, int runnetRet) {
 
-    // Get model output results.
-    std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
-    iter = msOutputs.begin();
-    auto brach1_string = iter->first;
-    auto branch1_tensor = iter->second;
+    std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
+    iter = msOutputs.begin();
 
-    int OUTPUTS_LEN = branch1_tensor->ElementsNum();
+    // The mobilenetv2.ms model outputs just one branch.
+    auto outputTensor = iter->second;
+    int tensorNum = outputTensor->ElementsNum();
+    MS_PRINT("Number of tensor elements:%d", tensorNum);
 
-    float *temp_scores = static_cast<float *>(branch1_tensor->MutableData());
-    float scores[RET_CATEGORY_SUM];
-    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-        scores[i] = temp_scores[i];
-    }
 
+    // Get a pointer to the first score.
+    float *temp_scores = static_cast<float *>(outputTensor->MutableData());
 
+    float scores[RET_CATEGORY_SUM];
+    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
+        if (temp_scores[i] > 0.5) {
+            MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]);
+        }
+        scores[i] = temp_scores[i];
+    }
 
-    // Converted to text information that needs to be displayed in the APP.
-    std::string retStr = "";
-    if (runnetRet == 0) {
-        for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-            if (scores[i] > 0.3) {
-                retStr += g_labels_name_map[i];
-                retStr += ":";
-                std::string score_str = std::to_string(scores[i]);
-                retStr += score_str;
-                retStr += ";";
-            }
-        }
-    } else {
-        MS_PRINT("MindSpore run net failed!");
-        for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-            retStr += " :0.0;";
-        }
-    }
-    return retStr;
+    // Score for each category.
+    // Converted to the text information that needs to be displayed in the APP.
+    std::string categoryScore = "";
+    for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
+        categoryScore += labels_name_map[i];
+        categoryScore += ":";
+        std::string score_str = std::to_string(scores[i]);
+        categoryScore += score_str;
+        categoryScore += ";";
+    }
+    return categoryScore;
 }
 ```
-- 
Gitee
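One lifecycle detail worth keeping in mind when wiring the code above into the app: the tutorial deletes the `Context` immediately after the session has been created, while the session and the imported model stay alive until the demo is torn down. A minimal teardown sketch under that assumption (the function name is illustrative, not the demo's actual API):

```cpp
#include "include/lite_session.h"
#include "include/model.h"

// Called when the model is reloaded or the Activity is destroyed. The Context
// was already deleted right after CreateSessionMS, as in the tutorial code.
void ReleaseNets(mindspore::session::LiteSession *session, mindspore::lite::Model *model) {
  delete session;  // releases the compiled graph and its tensors
  delete model;    // releases the buffer parsed from mobilenetv2.ms
}
```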