diff --git a/tutorials/lite/source_en/quick_start/quick_start.md b/tutorials/lite/source_en/quick_start/quick_start.md
index 3aec79eae32f73db9496d1ce921fa0e3295dd7c0..a7781653ff473ac08d6d4be2aea2b1c8977fe6f9 100644
--- a/tutorials/lite/source_en/quick_start/quick_start.md
+++ b/tutorials/lite/source_en/quick_start/quick_start.md
@@ -108,7 +108,7 @@ app
 │   |
 │   ├── cpp # main logic encapsulation classes for model loading and prediction
 |   |   |── ...
-|   |   ├── mindspore_lite_1.0.0-minddata-arm64-cpu` #MindSpore Lite version
+|   |   ├── mindspore-lite-1.0.1-runtime-arm64-cpu #MindSpore Lite version
 |   |   ├── MindSporeNetnative.cpp # JNI methods related to MindSpore calling
 │   |   └── MindSporeNetnative.h # header file
 │   |
@@ -133,11 +133,11 @@ app
 When MindSpore C++ APIs are called at the Android JNI layer, related library files are required. You can use MindSpore Lite [source code compilation](https://www.mindspore.cn/tutorial/lite/en/master/use/build.html) to generate the MindSpore Lite version. In this case, you need to use the compile command of generate with image preprocessing module.
 
-In this example, the build process automatically downloads the `mindspore-lite-1.0.0-minddata-arm64-cpu` by the `app/download.gradle` file and saves in the `app/src/main/cpp` directory.
+In this example, the `app/download.gradle` script automatically downloads the `mindspore-lite-1.0.1-runtime-arm64-cpu` package during the build and saves it in the `app/src/main/cpp` directory.
 
 Note: if the automatic download fails, please manually download the relevant library files and put them in the corresponding location.
 
-mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
+mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [Download link](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
 
 ```text
 android{
@@ -165,14 +165,18 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
 
 add_library(mindspore-lite SHARED IMPORTED )
 add_library(minddata-lite SHARED IMPORTED )
+add_library(libmindspore-lite-fp16 SHARED IMPORTED )
 
 set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
   ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
 set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
-  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
+  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
+set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
+  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
 # --------------- MindSpore Lite set End. --------------------
 
 # Link target library.
@@ -181,6 +185,7 @@ target_link_libraries(
     # --- mindspore ---
     minddata-lite
     mindspore-lite
+    libmindspore-lite-fp16
     ...
 )
 ```
@@ -215,15 +220,17 @@ The inference code process is as follows. For details about the complete code, s
     void **labelEnv = new void *;
     MSNetWork *labelNet = new MSNetWork;
    *labelEnv = labelNet;
-
+
     // Create context.
     mindspore::lite::Context *context = new mindspore::lite::Context;
     context->thread_num_ = num_thread;
-
+    context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
+    context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
+    context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
+
     // Create the mindspore session.
     labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
     delete(context);
    ```
 
 - Load the model file and build a computational graph for inference.
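Taken together, the snippet above boils down to the following flow — a minimal sketch, assuming the sample's `MSNetWork` wrapper and a `modelBuffer` already read from the APK assets (the include path and helper name are illustrative):

```cpp
#include "include/context.h"  // mindspore::lite::Context, shipped in the Lite package

// Hypothetical helper mirroring the tutorial snippet: the context is fully
// configured *before* CreateSessionMS and freed right after, because the
// session keeps its own copy of the settings.
void CreateLabelSession(MSNetWork *labelNet, char *modelBuffer, size_t bufferLen, int num_thread) {
  mindspore::lite::Context *context = new mindspore::lite::Context;
  context->thread_num_ = num_thread;
  context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
  context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;  // served by libmindspore-lite-fp16.so
  context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;

  labelNet->CreateSessionMS(modelBuffer, bufferLen, "device label", context);
  delete context;
}
```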
@@ -327,24 +334,32 @@ The inference code process is as follows. For details about the complete code, s
 std::string ProcessRunnetResult(std::unordered_map<std::string, mindspore::tensor::MSTensor *> msOutputs, int runnetRet) {
-  std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
-  iter = msOutputs.begin();
-
-  // The mobilenetv2.ms model output just one branch.
-  auto outputTensor = iter->second;
-  int tensorNum = outputTensor->ElementsNum();
+  std::unordered_map<std::string, mindspore::tensor::MSTensor *>::iterator iter;
+  iter = msOutputs.begin();
 
-  // Get a pointer to the first score.
-  float *temp_scores = static_cast<float *>(outputTensor->MutableData());
+  // The mobilenetv2.ms model output just one branch.
+  auto outputTensor = iter->second;
+  int tensorNum = outputTensor->ElementsNum();
 
-  float scores[RET_CATEGORY_SUM];
-  for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-    if (temp_scores[i] > 0.5) {
-      MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]);
+  // Get a pointer to the first score.
+  float *temp_scores = static_cast<float *>(outputTensor->MutableData());
+  float scores[RET_CATEGORY_SUM];
+  for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
+    scores[i] = temp_scores[i];
+  }
+
+  float unifiedThre = 0.5;
+  float probMax = 1.0;
+  for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) {
+    float threshold = g_thres_map[i];
+    float tmpProb = scores[i];
+    if (tmpProb < threshold) {
+      tmpProb = tmpProb / threshold * unifiedThre;
+    } else {
+      tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre;
     }
-    scores[i] = temp_scores[i];
+    scores[i] = tmpProb;
   }
-
   // Score for each category.
   // Converted to text information that needs to be displayed in the APP.
   std::string categoryScore = "";
@@ -357,4 +372,4 @@ The inference code process is as follows. For details about the complete code, s
     }
     return categoryScore;
 }
-  ```
+  ```
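The second loop added above rescales each raw score against its per-category threshold `g_thres_map[i]`, so every category can be judged against one unified 0.5 cutoff: sub-threshold scores are compressed into [0, 0.5) and above-threshold scores stretched into [0.5, 1.0]. A self-contained sketch of the same mapping, with a made-up threshold of 0.2:

```cpp
#include <cstdio>

// Map a raw score so that `threshold` lands on 0.5 and the probability
// ceiling (1.0) stays at 1.0 -- the same arithmetic as the loop above.
float UnifyScore(float score, float threshold) {
  const float unifiedThre = 0.5f;
  const float probMax = 1.0f;
  if (score < threshold) {
    return score / threshold * unifiedThre;  // [0, threshold) -> [0, 0.5)
  }
  return (score - threshold) / (probMax - threshold) * unifiedThre + unifiedThre;  // [threshold, 1] -> [0.5, 1]
}

int main() {
  printf("%.3f\n", UnifyScore(0.1f, 0.2f));  // 0.250: below threshold, compressed
  printf("%.3f\n", UnifyScore(0.2f, 0.2f));  // 0.500: exactly at threshold
  printf("%.3f\n", UnifyScore(0.6f, 0.2f));  // 0.750: above threshold, stretched
  return 0;
}
```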
diff --git a/tutorials/lite/source_zh_cn/quick_start/quick_start.md b/tutorials/lite/source_zh_cn/quick_start/quick_start.md
index 75d6e2eda5ab1ce0612922498c5dff4993ceea8a..112ea7a785cc02c0a852a84c6db5f653ccb94edc 100644
--- a/tutorials/lite/source_zh_cn/quick_start/quick_start.md
+++ b/tutorials/lite/source_zh_cn/quick_start/quick_start.md
@@ -111,7 +111,7 @@ app
 │   |
 │   ├── cpp # 模型加载和预测主要逻辑封装类
 |   |   ├── ...
-|   |   ├── mindspore-lite-1.0.0-minddata-arm64-cpu # MindSpore Lite版本
+|   |   ├── mindspore-lite-1.0.1-runtime-arm64-cpu # MindSpore Lite版本
 |   |   ├── MindSporeNetnative.cpp # MindSpore调用相关的JNI方法
 │   |   └── MindSporeNetnative.h # 头文件
 |   |   └── MsNetWork.cpp # MindSpore接口封装
@@ -147,7 +147,7 @@ Android JNI层调用MindSpore C++ API时,需要相关库文件支持。可通
 注: 若自动下载失败,请手动下载相关库文件,解压后将其放在对应位置:
 
-       mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz [下载链接](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.0/lite/android_aarch64/mindspore-lite-1.0.0-minddata-arm64-cpu.tar.gz)
+       mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz [下载链接](https://ms-release.obs.cn-north-4.myhuaweicloud.com/1.0.1/lite/android_aarch64/mindspore-lite-1.0.1-runtime-arm64-cpu.tar.gz)
 
 ```text
 android{
@@ -175,14 +175,18 @@ include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION})
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include)
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/ir/dtype)
 include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/include/schema)
+include_directories(${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/include)
 
 add_library(mindspore-lite SHARED IMPORTED )
 add_library(minddata-lite SHARED IMPORTED )
+add_library(libmindspore-lite-fp16 SHARED IMPORTED )
 
 set_target_properties(mindspore-lite PROPERTIES IMPORTED_LOCATION
   ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite.so)
 set_target_properties(minddata-lite PROPERTIES IMPORTED_LOCATION
-  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libminddata-lite.so)
+  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/minddata/lib/libminddata-lite.so)
+set_target_properties(libmindspore-lite-fp16 PROPERTIES IMPORTED_LOCATION
+  ${CMAKE_SOURCE_DIR}/src/main/cpp/${MINDSPORELITE_VERSION}/lib/libmindspore-lite-fp16.so)
 # --------------- MindSpore Lite set End. --------------------
 
 # Link target library.
@@ -191,6 +195,7 @@ target_link_libraries(
     # --- mindspore ---
     minddata-lite
     mindspore-lite
+    libmindspore-lite-fp16
     ...
 )
 ```
@@ -223,15 +228,17 @@ target_link_libraries(
     void **labelEnv = new void *;
     MSNetWork *labelNet = new MSNetWork;
     *labelEnv = labelNet;
-
+
     // Create context.
     mindspore::lite::Context *context = new mindspore::lite::Context;
     context->thread_num_ = num_thread;
-
+    context->device_list_[0].device_info_.cpu_device_info_.cpu_bind_mode_ = mindspore::lite::NO_BIND;
+    context->device_list_[0].device_info_.cpu_device_info_.enable_float16_ = true;
+    context->device_list_[0].device_type_ = mindspore::lite::DT_CPU;
+
     // Create the mindspore session.
     labelNet->CreateSessionMS(modelBuffer, bufferLen, context);
     delete (context);
    ```
 
 - 加载模型文件并构建用于推理的计算图
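The `msOutputs` map consumed by `ProcessRunnetResult` in the next hunk is produced by running the compiled graph. A minimal sketch of that step, assuming the `LiteSession` held by the sample's `MSNetWork` wrapper and inputs that have already been filled:

```cpp
#include <string>
#include <unordered_map>
#include "include/lite_session.h"  // mindspore::session::LiteSession
#include "include/ms_tensor.h"     // mindspore::tensor::MSTensor

// Execute inference and collect the name -> tensor map that
// ProcessRunnetResult takes as its first argument.
std::unordered_map<std::string, mindspore::tensor::MSTensor *>
RunInference(mindspore::session::LiteSession *session) {
  session->RunGraph();           // run the graph compiled from mobilenetv2.ms
  return session->GetOutputs();  // output tensors keyed by tensor name
}
```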
@@ -341,18 +348,27 @@ target_link_libraries(
     // The mobilenetv2.ms model output just one branch.
     auto outputTensor = iter->second;
     int tensorNum = outputTensor->ElementsNum();
-
-    // Get a pointer to the first score.
+
+    // Get a pointer to the first score.
     float *temp_scores = static_cast<float *>(outputTensor->MutableData());
-
     float scores[RET_CATEGORY_SUM];
     for (int i = 0; i < RET_CATEGORY_SUM; ++i) {
-      if (temp_scores[i] > 0.5) {
-        MS_PRINT("MindSpore scores[%d] : [%f]", i, temp_scores[i]);
-      }
-      scores[i] = temp_scores[i];
+      scores[i] = temp_scores[i];
     }
-
+
+    float unifiedThre = 0.5;
+    float probMax = 1.0;
+    for (size_t i = 0; i < RET_CATEGORY_SUM; ++i) {
+      float threshold = g_thres_map[i];
+      float tmpProb = scores[i];
+      if (tmpProb < threshold) {
+        tmpProb = tmpProb / threshold * unifiedThre;
+      } else {
+        tmpProb = (tmpProb - threshold) / (probMax - threshold) * unifiedThre + unifiedThre;
+      }
+      scores[i] = tmpProb;
+    }
+
     // Score for each category.
     // Converted to text information that needs to be displayed in the APP.
     std::string categoryScore = "";
@@ -365,4 +381,4 @@ target_link_libraries(
     }
     return categoryScore;
 }
-  ```
+  ```
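On the Java side the returned `categoryScore` string is split back into per-label scores. One plausible shape for the loop elided from the hunks above — the three-entry label table here is invented for the demo, while the sample keeps its own `labels_name_map` with `RET_CATEGORY_SUM` entries:

```cpp
#include <cstdio>
#include <string>

int main() {
  // Hypothetical labels and unified scores standing in for the sample's data.
  const std::string labels[] = {"cat", "dog", "plant"};
  const float scores[] = {0.75f, 0.25f, 0.50f};

  // Concatenate "label:score;" pairs, mirroring how categoryScore is built.
  std::string categoryScore;
  for (int i = 0; i < 3; ++i) {
    categoryScore += labels[i] + ":" + std::to_string(scores[i]) + ";";
  }
  printf("%s\n", categoryScore.c_str());  // cat:0.750000;dog:0.250000;plant:0.500000
  return 0;
}
```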