diff --git a/frameworks/native/neural_network_core/neural_network_core.cpp b/frameworks/native/neural_network_core/neural_network_core.cpp
index 57667052b3848ff024b903547b2495fed22f1138..52992300b6b2876c06f7d27b0c5d69dcfdb75c3e 100644
--- a/frameworks/native/neural_network_core/neural_network_core.cpp
+++ b/frameworks/native/neural_network_core/neural_network_core.cpp
@@ -81,7 +81,7 @@ NNRT_API OH_NN_ReturnCode OH_NNDevice_GetName(size_t deviceID, const char **name
     BackendManager& backendManager = BackendManager::GetInstance();
     const std::string& backendName = backendManager.GetBackendName(deviceID);
     if (backendName.empty()) {
-        LOGE("OH_NNDevice_GetName failed, error happened when getting name of deviceID %{public}zu.", deviceID);
+        LOGE("OH_NNDevice_GetName failed, error happened when getting name of deviceID.");
         *name = nullptr;
         return OH_NN_FAILED;
     }
@@ -106,7 +106,7 @@ NNRT_API OH_NN_ReturnCode OH_NNDevice_GetType(size_t deviceID, OH_NN_DeviceType*
 
     OH_NN_ReturnCode ret = backend->GetBackendType(*deviceType);
     if (ret != OH_NN_SUCCESS) {
-        LOGE("OH_NNDevice_GetType failed, device id: %{public}zu.", deviceID);
+        LOGE("OH_NNDevice_GetType failed.");
         return ret;
     }
     return OH_NN_SUCCESS;
@@ -638,17 +638,17 @@ OH_NN_ReturnCode GetModelId(Compilation** compilation)
     }
 
     if (nnrtService.GetNNRtModelIDFromPath == nullptr) {
-        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromPath func is nullptr");
+        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromPath func is nullptr.");
         return OH_NN_INVALID_PARAMETER;
     }
 
     if (nnrtService.GetNNRtModelIDFromBuffer == nullptr) {
-        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromBuffer func is nullptr");
+        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromBuffer func is nullptr.");
         return OH_NN_INVALID_PARAMETER;
     }
 
     if (nnrtService.GetNNRtModelIDFromModel == nullptr) {
-        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromModel func is nullptr");
+        LOGE("GetModelId failed, nnrtService GetNNRtModelIDFromModel func is nullptr.");
         return OH_NN_INVALID_PARAMETER;
     }
 
@@ -978,7 +978,7 @@ NNRT_API NN_Tensor* OH_NNTensor_Create(size_t deviceID, NN_TensorDesc *tensorDes
     BackendManager& backendManager = BackendManager::GetInstance();
     std::shared_ptr<Backend> backend = backendManager.GetBackend(deviceID);
     if (backend == nullptr) {
-        LOGE("OH_NNTensor_Create failed, passed invalid backend name %{public}zu.", deviceID);
+        LOGE("OH_NNTensor_Create failed, passed invalid backend name.");
         return nullptr;
     }
 
@@ -1010,7 +1010,7 @@ NNRT_API NN_Tensor* OH_NNTensor_CreateWithSize(size_t deviceID, NN_TensorDesc *t
     BackendManager& backendManager = BackendManager::GetInstance();
     std::shared_ptr<Backend> backend = backendManager.GetBackend(deviceID);
     if (backend == nullptr) {
-        LOGE("OH_NNTensor_CreateWithSize failed, passed invalid backend name %{public}zu.", deviceID);
+        LOGE("OH_NNTensor_CreateWithSize failed, passed invalid backend name.");
         return nullptr;
     }
 
@@ -1068,7 +1068,7 @@ NNRT_API NN_Tensor* OH_NNTensor_CreateWithFd(size_t deviceID,
     BackendManager& backendManager = BackendManager::GetInstance();
     std::shared_ptr<Backend> backend = backendManager.GetBackend(deviceID);
     if (backend == nullptr) {
-        LOGE("OH_NNTensor_CreateWithFd failed, passed invalid backend name %{public}zu.", deviceID);
+        LOGE("OH_NNTensor_CreateWithFd failed, passed invalid backend name.");
         return nullptr;
     }
 
diff --git a/frameworks/native/neural_network_runtime/neural_network_runtime.cpp b/frameworks/native/neural_network_runtime/neural_network_runtime.cpp
index 25d71c3cd060130c5bf9f91f339d2654d86e56c2..a4212d7f4a20880ef6e96c855aa1e55224a04d92 100644
--- a/frameworks/native/neural_network_runtime/neural_network_runtime.cpp
+++ b/frameworks/native/neural_network_runtime/neural_network_runtime.cpp
@@ -503,7 +503,31 @@ NNRT_API OH_NN_ReturnCode OH_NNModel_BuildFromLiteGraph(OH_NNModel *model, const
     return innerModel->BuildFromLiteGraph(pLiteGraph, extensionConfig);
 }
 
-NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName)
+bool CheckCacheFile(const char* path, int64_t& fileNumber, int64_t& cacheVersion)
+{
+    std::ifstream ifs(path, std::ios::in | std::ios::binary);
+    if (!ifs) {
+        LOGI("CheckCacheFile open cache info file failed.");
+        return false;
+    }
+
+    if (!ifs.read(reinterpret_cast<char*>(&fileNumber), sizeof(fileNumber))) {
+        LOGI("CheckCacheFile read fileNumber from cache info file failed.");
+        ifs.close();
+        return false;
+    }
+
+    if (!ifs.read(reinterpret_cast<char*>(&cacheVersion), sizeof(cacheVersion))) {
+        LOGI("CheckCacheFile read cacheVersion from cache info file failed.");
+        ifs.close();
+        return false;
+    }
+
+    ifs.close();
+    return true;
+}
+
+NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version)
 {
     if (cacheDir == nullptr) {
         LOGI("OH_NNModel_HasCache get empty cache directory.");
@@ -536,19 +560,12 @@ NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName)
         return false;
     }
 
-    std::ifstream ifs(path, std::ios::in | std::ios::binary);
-    if (!ifs) {
-        LOGI("OH_NNModel_HasCache open cache info file failed.");
-        return false;
-    }
-
     int64_t fileNumber{0};
-    if (!ifs.read(reinterpret_cast<char*>(&(fileNumber)), sizeof(fileNumber))) {
-        LOGI("OH_NNModel_HasCache read cache info file failed.");
-        ifs.close();
+    int64_t cacheVersion{0};
+    if (!CheckCacheFile(path, fileNumber, cacheVersion)) {
+        LOGI("OH_NNModel_HasCache read fileNumber or cacheVersion failed.");
         return false;
     }
-    ifs.close();
 
     // determine whether cache model files exist
     for (int64_t i = 0; i < fileNumber; ++i) {
@@ -557,6 +574,11 @@ NNRT_API bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName)
         exist = (exist && (stat(cacheModelPath.c_str(), &buffer) == 0));
     }
 
+    if (cacheVersion != version) {
+        LOGW("OH_NNModel_HasCache cache version does not match.");
+        exist = false;
+    }
+
     return exist;
 }
 
diff --git a/frameworks/native/neural_network_runtime/nncompiled_cache.cpp b/frameworks/native/neural_network_runtime/nncompiled_cache.cpp
index dde032cdc1a1eb18fe61ae25964038e717ae9ba8..05aeedad432fb04293f4063e7cd3c20798431af2 100644
--- a/frameworks/native/neural_network_runtime/nncompiled_cache.cpp
+++ b/frameworks/native/neural_network_runtime/nncompiled_cache.cpp
@@ -213,8 +213,9 @@ OH_NN_ReturnCode NNCompiledCache::GenerateCacheModel(const std::vector<Buffer>&
     size_t deviceId = static_cast<size_t>(modelCacheInfo.deviceId);
     if (deviceId != m_backendID) {
-        LOGE("[NNCompiledCache] CheckCacheInfo failed. The deviceId=%{public}zu in the cache files "
-             "is different from current deviceId=%{public}zu,"
-             "please change the cache directory or current deviceId.",
-             deviceId,
-             m_backendID);
+        LOGE("[NNCompiledCache] CheckCacheInfo failed. The deviceId in the cache files "
+             "is different from the current deviceId, "
+             "please change the cache directory or the current deviceId.");
         infoCacheFile.close();
         return OH_NN_INVALID_PARAMETER;
     }
diff --git a/frameworks/native/neural_network_runtime/nncompiler.cpp b/frameworks/native/neural_network_runtime/nncompiler.cpp
index 31656e3e60848cf971dd2a86932e20cd42252b48..a65f5ddcca6361c6ae86926358d61771e2fc3e83 100644
--- a/frameworks/native/neural_network_runtime/nncompiler.cpp
+++ b/frameworks/native/neural_network_runtime/nncompiler.cpp
@@ -450,6 +450,32 @@ OH_NN_ReturnCode NNCompiler::OnlineBuild()
 {
     // If a cache exists, restore the prepareModel and input/output TensorDescs directly from it.
     OH_NN_ReturnCode ret = RestoreFromCacheFile();
+    if (ret != OH_NN_SUCCESS) {
+        LOGE("[NNCompiler] Restoring from the cache file failed, deleting the invalid cache.");
+        char path[PATH_MAX] = {0};
+        if (realpath(m_cachePath.c_str(), path) == nullptr) {
+            LOGW("[NNCompiler] OnlineBuild failed to get the real path of cacheDir.");
+        }
+
+        std::string cachePath = path;
+        std::string firstCache = cachePath + "/" + m_extensionConfig.modelName + "0.nncache";
+        std::string secondCache = cachePath + "/" + m_extensionConfig.modelName + "1.nncache";
+        std::string thirdCache = cachePath + "/" + m_extensionConfig.modelName + "2.nncache";
+        std::string cacheInfo = cachePath + "/" + m_extensionConfig.modelName + "cache_info.nncache";
+        if (std::filesystem::exists(firstCache)) {
+            std::filesystem::remove_all(firstCache);
+        }
+        if (std::filesystem::exists(secondCache)) {
+            std::filesystem::remove_all(secondCache);
+        }
+        if (std::filesystem::exists(thirdCache)) {
+            std::filesystem::remove_all(thirdCache);
+        }
+        if (std::filesystem::exists(cacheInfo)) {
+            std::filesystem::remove_all(cacheInfo);
+        }
+    }
+
     if (ret == OH_NN_OPERATION_FORBIDDEN) {
         LOGE("[NNCompiler] Build failed, operation is forbidden.");
         return ret;
diff --git a/interfaces/innerkits/c/neural_network_runtime_inner.h b/interfaces/innerkits/c/neural_network_runtime_inner.h
index f6eabda3e7fac894979c4fbbb08c558dce18bca1..c35dff3f0a11ad9fae7aa0faa9c8721844d7f277 100644
--- a/interfaces/innerkits/c/neural_network_runtime_inner.h
+++ b/interfaces/innerkits/c/neural_network_runtime_inner.h
@@ -139,7 +139,7 @@ OH_NN_ReturnCode OH_NNModel_BuildFromMetaGraph(OH_NNModel *model, const void *me
  * @since 11
  * @version 1.0
  */
-bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName);
+bool OH_NNModel_HasCache(const char *cacheDir, const char *modelName, uint32_t version);
 
 #ifdef __cplusplus
 }
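Reviewer note: below is a minimal caller sketch for the new version parameter of OH_NNModel_HasCache. It is illustrative only and not part of the patch; the cache directory, model name, MODEL_CACHE_VERSION constant, and the PrepareFromCacheIfPossible helper are assumed placeholder values, not names from this repository.

#include <cstdint>
#include <cstdio>

#include "neural_network_runtime_inner.h"

// Assumed app-defined cache version; bump it whenever the app regenerates
// its compiled-model caches so older caches are treated as stale.
constexpr uint32_t MODEL_CACHE_VERSION = 1;

bool PrepareFromCacheIfPossible()
{
    const char *cacheDir = "/data/local/tmp/nn_cache";  // placeholder path
    const char *modelName = "demo_model";               // placeholder name

    // With this patch, OH_NNModel_HasCache not only checks that the cache
    // model files listed in <modelName>cache_info.nncache exist, but also
    // compares the recorded cacheVersion against the caller-supplied
    // version, so a cache written under an older version reports "no cache".
    if (OH_NNModel_HasCache(cacheDir, modelName, MODEL_CACHE_VERSION)) {
        printf("valid cache found, restore instead of rebuilding\n");
        return true;
    }
    printf("no usable cache, fall back to a full online build\n");
    return false;
}

This mirrors the intent of the NNCompiler::OnlineBuild change above: a cache that cannot be restored (or whose version no longer matches) is discarded and the model is rebuilt from scratch.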