diff --git a/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.cpp b/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.cpp
index adca2bf15f1501a45d174d1bb278122d235078e8..b3b535b9048f43b84ee8983fc8b95dabd207038b 100644
--- a/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.cpp
+++ b/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.cpp
@@ -166,6 +166,17 @@ HDIPreparedModelV2_0::HDIPreparedModelV2_0(OHOS::sptr<V2_0::IPreparedModel> hdiP
     hdiPreparedModel->GetVersion(m_hdiVersion.first, m_hdiVersion.second);
 }
 
+HDIPreparedModelV2_0::~HDIPreparedModelV2_0()
+{
+    for (auto addr : m_addrs) {
+        auto memManager = MemoryManager::GetInstance();
+        OH_NN_ReturnCode ret = memManager->UnMapMemory(addr);
+        if (ret != OH_NN_SUCCESS) {
+            LOGE("~HDIPreparedModelV2_0 UnMapMemory failed.");
+        }
+    }
+}
+
 OH_NN_ReturnCode HDIPreparedModelV2_0::ExportModelCache(std::vector<Buffer>& modelCache)
 {
     if (!modelCache.empty()) {
@@ -187,6 +198,7 @@ OH_NN_ReturnCode HDIPreparedModelV2_0::ExportModelCache(std::vector<Buffer>& mod
             LOGE("Export the %{public}zuth model cache failed, cannot map fd to address.", i + 1);
             return OH_NN_MEMORY_ERROR;
         }
+        m_addrs.emplace_back(addr);
         Buffer modelbuffer {addr, iBuffers[i].bufferSize};
         modelCache.emplace_back(modelbuffer);
     }
diff --git a/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.h b/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.h
index 7f3defde710e30866dd3a47b1be4d6ce63d7e917..cbbb9b9b2e19dc4d373930ca104badb2c433317d 100644
--- a/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.h
+++ b/frameworks/native/neural_network_runtime/hdi_prepared_model_v2_0.h
@@ -34,6 +34,7 @@ namespace NeuralNetworkRuntime {
 class HDIPreparedModelV2_0 : public PreparedModel {
 public:
     explicit HDIPreparedModelV2_0(OHOS::sptr<V2_0::IPreparedModel> hdiPreparedModel);
+    ~HDIPreparedModelV2_0() override;
 
     OH_NN_ReturnCode ExportModelCache(std::vector<Buffer>& modelCache) override;
 
@@ -54,6 +55,7 @@ private:
     // first: major version, second: minor version
     std::pair<uint32_t, uint32_t> m_hdiVersion;
     OHOS::sptr<V2_0::IPreparedModel> m_hdiPreparedModel {nullptr};
+    std::vector<void*> m_addrs;
 };
 } // namespace NeuralNetworkRuntime
 } // OHOS
diff --git a/frameworks/native/neural_network_runtime/nntensor.cpp b/frameworks/native/neural_network_runtime/nntensor.cpp
index 477bdf60f84b79397d224c7bc926c62e4cb20539..d234227d863dfa96b457b482c24e37577131e18a 100644
--- a/frameworks/native/neural_network_runtime/nntensor.cpp
+++ b/frameworks/native/neural_network_runtime/nntensor.cpp
@@ -26,9 +26,7 @@ namespace OHOS {
 namespace NeuralNetworkRuntime {
 NNTensor2_0::~NNTensor2_0()
 {
-    if (!m_isUserData) {
-        ReleaseMemory();
-    }
+    ReleaseMemory();
 
     delete m_tensorDesc;
     m_tensorDesc = nullptr;
@@ -247,37 +245,39 @@ OH_NN_ReturnCode NNTensor2_0::ReleaseMemory()
         return OH_NN_INVALID_PARAMETER;
     }
 
-    BackendManager& backendManager = BackendManager::GetInstance();
-    std::shared_ptr<Backend> backend = backendManager.GetBackend(m_backendID);
-    if (backend == nullptr) {
-        LOGE("NNTensor2_0::ReleaseMemory failed, failed to get backend of %{public}zu.", m_backendID);
-        return OH_NN_NULL_PTR;
-    }
-
-    auto* nnrtBackend = reinterpret_cast<NNBackend*>(backend.get());
-    auto device = nnrtBackend->GetDevice();
-    if (device == nullptr) {
-        LOGE("");
-        return OH_NN_NULL_PTR;
-    }
-    auto oldRet = device->ReleaseBuffer(m_fd, m_size);
-    if (oldRet != OH_NN_SUCCESS) {
-        LOGE("NNTensor2_0::ReleaseMemory failed, failed to release buffer.");
-        return OH_NN_MEMORY_ERROR;
-    }
-
     auto unmapResult = munmap(m_data, m_size);
     if (unmapResult != 0) {
         LOGE("NNTensor2_0::ReleaseMemory failed. Please try again.");
         return OH_NN_MEMORY_ERROR;
+    }
+
+    if (!m_isUserData) {
+        if (close(m_fd) != 0) {
+            LOGE("NNTensor2_0::ReleaseMemory failed. fd=%{public}d", m_fd);
+            return OH_NN_MEMORY_ERROR;
+        }
+        BackendManager& backendManager = BackendManager::GetInstance();
+        std::shared_ptr<Backend> backend = backendManager.GetBackend(m_backendID);
+        if (backend == nullptr) {
+            LOGE("NNTensor2_0::ReleaseMemory failed, failed to get backend of %{public}zu.", m_backendID);
+            return OH_NN_NULL_PTR;
+        }
+
+        auto* nnrtBackend = reinterpret_cast<NNBackend*>(backend.get());
+        auto device = nnrtBackend->GetDevice();
+        if (device == nullptr) {
+            LOGE("NNTensor2_0::ReleaseMemory failed, device is nullptr.");
+            return OH_NN_NULL_PTR;
+        }
+        auto oldRet = device->ReleaseBuffer(m_fd, m_size);
+        if (oldRet != OH_NN_SUCCESS) {
+            LOGE("NNTensor2_0::ReleaseMemory failed, failed to release buffer.");
+            return OH_NN_MEMORY_ERROR;
+        }
     }
+
     m_data = nullptr;
     m_size = 0;
-
-    if (close(m_fd) != 0) {
-        LOGE("NNTensor2_0::ReleaseMemory failed. fd=%{public}d", m_fd);
-        return OH_NN_MEMORY_ERROR;
-    }
     m_fd = 0;
 
     return OH_NN_SUCCESS;
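
// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the patch): both changes above follow the
// same ownership pattern -- every address returned by mmap is recorded and
// unmapped exactly once, and the mapping is torn down before the descriptor
// that backs it, matching the new ordering in NNTensor2_0::ReleaseMemory().
// MappedBuffer and its members are hypothetical names; only mmap, munmap and
// close are real POSIX calls.

#include <sys/mman.h>
#include <unistd.h>

#include <cstddef>
#include <cstdio>

class MappedBuffer {
public:
    // Map `size` bytes of `fd` and take ownership of both the mapping and
    // the descriptor, like the cache buffers tracked in m_addrs above.
    MappedBuffer(int fd, size_t size) : m_fd(fd), m_size(size)
    {
        void* addr = mmap(nullptr, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
        m_data = (addr == MAP_FAILED) ? nullptr : addr;
    }

    // Teardown mirrors ReleaseMemory(): unmap the view first, then close the
    // descriptor behind it. Destructors must not throw, so failures are only
    // logged, just as ~HDIPreparedModelV2_0() only logs UnMapMemory errors.
    ~MappedBuffer()
    {
        if (m_data != nullptr && munmap(m_data, m_size) != 0) {
            perror("munmap");
        }
        if (m_fd >= 0 && close(m_fd) != 0) {
            perror("close");
        }
    }

    // The mapping is owned exactly once, so copying is forbidden -- the same
    // reason the entries of m_addrs must be unmapped only in the destructor.
    MappedBuffer(const MappedBuffer&) = delete;
    MappedBuffer& operator=(const MappedBuffer&) = delete;

    void* data() const { return m_data; }

private:
    int m_fd {-1};
    size_t m_size {0};
    void* m_data {nullptr};
};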