From f0b3f9def5b6d8a4a6c8af6cfebfa4507babe1a7 Mon Sep 17 00:00:00 2001 From: Jolin Zhang46 Date: Thu, 29 Oct 2020 21:13:40 +0800 Subject: [PATCH] update lite quick start --- .../lite/source_en/quick_start/quick_start.md | 45 ++++++++++++------- .../source_zh_cn/quick_start/quick_start.md | 42 +++++++++++------ 2 files changed, 58 insertions(+), 29 deletions(-) diff --git a/tutorials/lite/source_en/quick_start/quick_start.md b/tutorials/lite/source_en/quick_start/quick_start.md index 3aec79eae3..2e29f128f6 100644 --- a/tutorials/lite/source_en/quick_start/quick_start.md +++ b/tutorials/lite/source_en/quick_start/quick_start.md @@ -31,10 +31,6 @@ This tutorial demonstrates the on-device deployment process based on the image c > Click to find [Android image classification models](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite) and [sample code](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification). -We provide the APK file corresponding to this example. You can scan the QR code below or [download](https://download.mindspore.cn/model_zoo/official/lite/apk/label/Classification.apk) it directly, and deploy it to Android devices for use. - -![apk](../images/classification_apk.png) - ## Selecting a Model The MindSpore team provides a series of preset device models that you can use in your application. @@ -108,7 +104,7 @@ app │ | │ ├── cpp # main logic encapsulation classes for model loading and prediction | | |── ... -| | ├── mindspore_lite_1.0.0-minddata-arm64-cpu` #MindSpore Lite version +| | ├── mindspore-lite-1.0.1-minddata-arm64-cpu # MindSpore Lite version | | ├── MindSporeNetnative.cpp # JNI methods related to MindSpore calling │ | └── MindSporeNetnative.h # header file │ | @@ -133,11 +129,11 @@ app When MindSpore C++ APIs are called at the Android JNI layer, related library files are required. 
You can use MindSpore Lite [source code compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the MindSpore Lite version. In this case, you need to use the compile command of generate with image preprocessing module. -In this example, the build process automatically downloads the `mindspore-lite-1.0.0-minddata-arm64-cpu` by the `app/download.gradle` file and saves in the `app/src/main/cpp` directory. +In this example, the build process automatically downloads the `mindspore-lite-1.0.1-minddata-arm64-cpu` library via the `app/download.gradle` file and saves it in the `app/src/main/cpp` directory. Note: if the automatic download fails, please manually download the relevant library files and put them in the corresponding location. -mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz) +mindspore-lite-1.0.1-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-minddata-arm64-cpu.tar.gz) ```text android{ @@ -165,14 +161,18 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include) add_library(mindspore-lite SHARED IMPORTED ) add_library(minddata-lite SHARED IMPORTED ) +add_library(mindspore-lite-fp16 SHARED IMPORTED ) set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so) set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION 
${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so) +set_target_properties(mindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION + ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so) # --------------- MindSpore Lite set End. -------------------- # Link target library. @@ -181,6 +181,7 @@ target_link_libraries( # --- mindspore --- minddata-lite mindspore-lite + mindspore-lite-fp16 ... ) ``` @@ -215,15 +216,18 @@ The inference code process is as follows. For details about the complete code, s void **labelEnv = new void *; MSNetWork *labelNet = new MSNetWork; *labelEnv = labelNet; - + // Create context. mindspore::lite::Context *context = new mindspore::lite::Context; context->thread_num_ = num_thread; - + context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; + context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; + context->device_list_[0].device_type_ = mindspore::lite::DT_CPU; + // Create the mindspore session. labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context); delete(context); - + ``` - Load the model file and build a computational graph for inference. @@ -339,12 +343,23 @@ The inference code process is as follows. For details about the complete code, s float scores[RET_CATEGORY_SUM]; for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - if (temp_scores[i] > 0.5) { - MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]); - } scores[i] = temp_scores[i]; } - + + + float unifiedThre = 0.5; + float probMax = 1.0; + for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) { + float threshold = g_thres_map[i]; + float tmpProb = scores[i]; + if (tmpProb < threshold) { + tmpProb = tmpProb / threshold * unifiedThre; + } else { + tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre; + } + scores[i] = tmpProb; + } + // Score for each category. 
// Converted to text information that needs to be displayed in the APP. std::string categoryScore = ""; @@ -357,4 +372,4 @@ The inference code process is as follows. For details about the complete code, s } return categoryScore; } - ``` + ``` diff --git a/tutorials/lite/source_zh_cn/quick_start/quick_start.md b/tutorials/lite/source_zh_cn/quick_start/quick_start.md index 75d6e2eda5..d1a8bb97db 100644 --- a/tutorials/lite/source_zh_cn/quick_start/quick_start.md +++ b/tutorials/lite/source_zh_cn/quick_start/quick_start.md @@ -31,10 +31,6 @@ > 你可以在这里找到[Android图像分类模型](https://download.mindspore.cn/model_zoo/official/lite/mobilenetv2_openimage_lite)和[示例代码](https://gitee.com/mindspore/mindspore/tree/master/model_zoo/official/lite/image_classification)。 -我们提供了本示例对应的APK文件,你可扫描下方的二维码或直接[下载](https://download.mindspore.cn/model_zoo/official/lite/apk/label/Classification.apk),并部署到Android设备后使用。 - -![apk](../images/classification_apk.png) - ## 选择模型 MindSpore团队提供了一系列预置终端模型,你可以在应用程序中使用这些预置的终端模型。 @@ -111,7 +107,7 @@ app │ | │ ├── cpp # 模型加载和预测主要逻辑封装类 | | ├── .. 
-| | ├── mindspore-lite-1.0.0-minddata-arm64-cpu # MindSpore Lite版本 +| | ├── mindspore-lite-1.0.1-minddata-arm64-cpu # MindSpore Lite版本 | | ├── MindSporeNetnative.cpp # MindSpore调用相关的JNI方法 │ | └── MindSporeNetnative.h # 头文件 | | └── MsNetWork.cpp # MindSpore接口封装 @@ -147,7 +143,7 @@ Android JNI层调用MindSpore C++ API时,需要相关库文件支持。可通 注: 若自动下载失败,请手动下载相关库文件,解压后将其放在对应位置: - mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [下载链接](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz) + mindspore-lite-1.0.1-minddata-arm64-cpu.tar.gz [下载链接](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-minddata-arm64-cpu.tar.gz) ```text android{ @@ -175,14 +171,18 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype) include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema) +include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include) add_library(mindspore-lite SHARED IMPORTED ) add_library(minddata-lite SHARED IMPORTED ) +add_library(mindspore-lite-fp16 SHARED IMPORTED ) set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so) set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so) +set_target_properties(mindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION + ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so) # --------------- MindSpore Lite set End. -------------------- # Link target library. 
@@ -191,6 +191,7 @@ target_link_libraries( # --- mindspore --- minddata-lite mindspore-lite + mindspore-lite-fp16 ... ) ``` @@ -223,15 +224,18 @@ target_link_libraries( void **labelEnv = new void *; MSNetWork *labelNet = new MSNetWork; *labelEnv = labelNet; - + // Create context. mindspore::lite::Context *context = new mindspore::lite::Context; context->thread_num_ = num_thread; - + context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND; + context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = false; + context->device_list_[0].device_type_ = mindspore::lite::DT_CPU; + // Create the mindspore session. labelNet->CreateSessionMS(modelBuffer, bufferLen, context); delete (context); - + ``` - 加载模型文件并构建用于推理的计算图 @@ -347,12 +351,22 @@ target_link_libraries( float scores[RET_CATEGORY_SUM]; for (int i = 0; i < RET_CATEGORY_SUM; ++i) { - if (temp_scores[i] > 0.5) { - MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]); - } scores[i] = temp_scores[i]; } - + + float unifiedThre = 0.5; + float probMax = 1.0; + for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) { + float threshold = g_thres_map[i]; + float tmpProb = scores[i]; + if (tmpProb < threshold) { + tmpProb = tmpProb / threshold * unifiedThre; + } else { + tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre; + } + scores[i] = tmpProb; + } + // Score for each category. // Converted to text information that needs to be displayed in the APP. std::string categoryScore = ""; @@ -365,4 +379,4 @@ target_link_libraries( } return categoryScore; } - ``` + ``` -- Gitee