From 95918b73790bf7fc0be7222f7e38fe1d812ba350 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=91=A8=E7=BF=94?=
Date: Fri, 14 Jun 2024 10:24:57 +0800
Subject: [PATCH 1/4] Add UT test cases
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: 周翔
---
 test/unittest/common/v2_1/mock_idevice.cpp     |  54 ++
 test/unittest/common/v2_1/mock_idevice.h       |  65 ++
 test/unittest/components/BUILD.gn              | 138 +++
 .../components/nn_backend/nn_backend_test.cpp  | 716 ++++++++++++++
 .../nn_compiled_cache_test.cpp                 | 215 ++++
 .../nn_compiler/nn_compiler_test.cpp           | 100 ++
 .../quant_param/quant_param_test.cpp           | 143 +++
 .../v2_1/hdi_device/hdi_device_test.cpp        | 915 ++++++++++++++++++
 .../hdi_prepared_model_test.cpp                | 341 +++++++
 9 files changed, 2687 insertions(+)
 create mode 100644 test/unittest/common/v2_1/mock_idevice.cpp
 create mode 100644 test/unittest/common/v2_1/mock_idevice.h
 create mode 100644 test/unittest/components/nn_backend/nn_backend_test.cpp
 create mode 100644 test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp
 create mode 100644 test/unittest/components/nn_compiler/nn_compiler_test.cpp
 create mode 100644 test/unittest/components/quant_param/quant_param_test.cpp
 create mode 100644 test/unittest/components/v2_1/hdi_device/hdi_device_test.cpp
 create mode 100644 test/unittest/components/v2_1/hdi_prepared_model/hdi_prepared_model_test.cpp

diff --git a/test/unittest/common/v2_1/mock_idevice.cpp b/test/unittest/common/v2_1/mock_idevice.cpp
new file mode 100644
index 0000000..de73424
--- /dev/null
+++ b/test/unittest/common/v2_1/mock_idevice.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "mock_idevice.h"
+
+namespace OHOS {
+namespace HDI {
+namespace Nnrt {
+namespace V2_1 {
+sptr<INnrtDevice> INnrtDevice::Get(bool isStub)
+{
+    return INnrtDevice::Get("device_service", isStub);
+}
+
+sptr<INnrtDevice> INnrtDevice::Get(const std::string& serviceName, bool isStub)
+{
+    if (isStub) {
+        return nullptr;
+    }
+
+    sptr<INnrtDevice> mockIDevice = sptr<MockIDevice>(new (std::nothrow) MockIDevice());
+    if (mockIDevice == nullptr) {
+        return nullptr;
+    }
+    std::string deviceName = "MockDevice";
+    EXPECT_CALL(*((V2_1::MockIDevice*)mockIDevice.GetRefPtr()), GetDeviceName(::testing::_))
+        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(HDF_SUCCESS)));
+
+    std::string vendorName = "MockVendor";
+    EXPECT_CALL(*((V2_1::MockIDevice*)mockIDevice.GetRefPtr()), GetVendorName(::testing::_))
+        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(vendorName), ::testing::Return(HDF_SUCCESS)));
+
+    V2_1::DeviceStatus deviceStatus = V2_1::DeviceStatus::AVAILABLE;
+    EXPECT_CALL(*((V2_1::MockIDevice*)mockIDevice.GetRefPtr()), GetDeviceStatus(::testing::_))
+        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceStatus), ::testing::Return(HDF_SUCCESS)));
+
+    return mockIDevice;
+}
+} // V2_1
+} // Nnrt
+} // HDI
+} // OHOS
\ No newline at end of file
diff --git a/test/unittest/common/v2_1/mock_idevice.h b/test/unittest/common/v2_1/mock_idevice.h
new file mode 100644
index 0000000..791ed2c
--- /dev/null
+++ b/test/unittest/common/v2_1/mock_idevice.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NEURAL_NETWORK_RUNTIME_MOCK_IDEVICE_H
+#define NEURAL_NETWORK_RUNTIME_MOCK_IDEVICE_H
+
+#include <gmock/gmock.h>
+
+#include "hdi_prepared_model_v2_1.h"
+#include "memory_manager.h"
+#include "transform.h"
+
+namespace OHOS {
+namespace HDI {
+namespace Nnrt {
+namespace V2_1 {
+class MockIDevice : public INnrtDevice {
+public:
+    MOCK_METHOD1(GetDeviceName, int32_t(std::string&));
+    MOCK_METHOD1(GetVendorName, int32_t(std::string&));
+    MOCK_METHOD1(GetDeviceType, int32_t(DeviceType&));
+    MOCK_METHOD1(GetDeviceStatus, int32_t(DeviceStatus&));
+    MOCK_METHOD2(GetSupportedOperation, int32_t(const Model&, std::vector<bool>&));
+    MOCK_METHOD1(IsFloat16PrecisionSupported, int32_t(bool&));
+    MOCK_METHOD1(IsPerformanceModeSupported, int32_t(bool&));
+    MOCK_METHOD1(IsPrioritySupported, int32_t(bool&));
+    MOCK_METHOD1(IsDynamicInputSupported, int32_t(bool&));
+    MOCK_METHOD3(PrepareModel, int32_t(const Model&, const ModelConfig&, OHOS::sptr<IPreparedModel>&));
+    MOCK_METHOD1(IsModelCacheSupported, int32_t(bool&));
+    MOCK_METHOD3(PrepareModelFromModelCache, int32_t(const std::vector<SharedBuffer>&, const ModelConfig&,
+        OHOS::sptr<IPreparedModel>&));
+    MOCK_METHOD3(PrepareOfflineModel, int32_t(const std::vector<SharedBuffer>&, const ModelConfig&,
+        sptr<IPreparedModel>&));
+    MOCK_METHOD2(AllocateBuffer, int32_t(uint32_t, SharedBuffer&));
+    MOCK_METHOD1(ReleaseBuffer, int32_t(const SharedBuffer&));
+    MOCK_METHOD2(GetVersion, int32_t(uint32_t&, uint32_t&));
+};
+
+class MockIPreparedModel : public IPreparedModel {
+public:
+    MOCK_METHOD1(ExportModelCache, int32_t(std::vector<SharedBuffer>&));
+    MOCK_METHOD3(Run, int32_t(const std::vector<IOTensor>&, const std::vector<IOTensor>&,
+        std::vector<std::vector<int32_t>>&));
+    MOCK_METHOD2(GetInputDimRanges, int32_t(std::vector<std::vector<uint32_t>>&, std::vector<std::vector<uint32_t>>&));
+    MOCK_METHOD2(GetVersion, int32_t(uint32_t&, uint32_t&));
+
+    static OH_NN_ReturnCode m_ExpectRetCode;
+};
+} // V2_1
+} // Nnrt
+} // HDI
+} // OHOS
+#endif // NEURAL_NETWORK_RUNTIME_MOCK_IDEVICE_H
diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn
index 9829dac..8668c06 100644
--- a/test/unittest/components/BUILD.gn
+++ b/test/unittest/components/BUILD.gn
@@ -191,6 +191,90 @@ ohos_unittest("MemoryManagerTest") {
   ]
 }
 
+ohos_unittest("QuantParamsTest") {
+  module_out_path = module_output_path
+
+  sources = [ "./quant_param/quant_param_test.cpp" ]
+  configs = [ ":module_private_config" ]
+
+  deps = [
+    "../../../frameworks/native/neural_network_core:libneural_network_core",
+    "../../../frameworks/native/neural_network_runtime:libneural_network_runtime",
+    "//third_party/googletest:gmock_main",
+    "//third_party/googletest:gtest_main",
+  ]
+
+  external_deps = [
+    "drivers_interface_nnrt:libnnrt_proxy_1.0",
+    "hilog:libhilog",
+    "hitrace:libhitracechain",
+    "mindspore:mindir",
+  ]
+}
+
+ohos_unittest("NNBackendTest") {
+  module_out_path = module_output_path
+
+  sources = [ "./nn_backend/nn_backend_test.cpp" ]
+  configs = [ ":module_private_config" ]
+
+  deps = [
+    "../../../frameworks/native/neural_network_core:libneural_network_core",
+    "../../../frameworks/native/neural_network_runtime:libneural_network_runtime",
+    "//third_party/googletest:gmock_main",
+    "//third_party/googletest:gtest_main",
+  ]
+
+  external_deps = [
+    "drivers_interface_nnrt:libnnrt_proxy_1.0",
+    "hilog:libhilog",
+    "hitrace:libhitracechain",
+    "mindspore:mindir",
+  ]
+}
+
+ohos_unittest("NNCompiledCacheTest") {
+  module_out_path = module_output_path
+
+  sources = [ "./nn_compiled_cache/nn_compiled_cache_test.cpp" ]
+  configs = [ ":module_private_config" ]
+
+  deps = [
+    "../../../frameworks/native/neural_network_core:libneural_network_core",
"../../../frameworks/native/neural_network_runtime:libneural_network_runtime", + "//third_party/googletest:gmock_main", + "//third_party/googletest:gtest_main", + ] + + external_deps = [ + "drivers_interface_nnrt:libnnrt_proxy_1.0", + "hilog:libhilog", + "hitrace:libhitracechain", + "mindspore:mindir", + ] +} + +ohos_unittest("NNCompilerTest") { + module_out_path = module_output_path + + sources = [ "./nn_compiler/nn_compiler_test.cpp" ] + configs = [ ":module_private_config" ] + + deps = [ + "../../../frameworks/native/neural_network_core:libneural_network_core", + "../../../frameworks/native/neural_network_runtime:libneural_network_runtime", + "//third_party/googletest:gmock_main", + "//third_party/googletest:gtest_main", + ] + + external_deps = [ + "drivers_interface_nnrt:libnnrt_proxy_1.0", + "hilog:libhilog", + "hitrace:libhitracechain", + "mindspore:mindir", + ] +} + ohos_unittest("TransformV1_0Test") { module_out_path = module_output_path @@ -447,6 +531,30 @@ ohos_unittest("HDIDeviceV2_0Test") { ] } +ohos_unittest("HDIDeviceV2_1Test") { + module_out_path = module_output_path + + sources = [ "./v2_1/hdi_device/hdi_device_test.cpp" ] + sources += [ "../common/v2_1/mock_idevice.cpp" ] + sources += [ "../common/file_utils.cpp" ] + configs = [ ":module_private_config" ] + + deps = [ + "../../../frameworks/native/neural_network_core:libneural_network_core", + "../../../frameworks/native/neural_network_runtime:libneural_network_runtime", + "//third_party/googletest:gmock_main", + "//third_party/googletest:gtest_main", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_nnrt:libnnrt_proxy_2.0", + "hilog:libhilog", + "hitrace:libhitracechain", + "mindspore:mindir", + ] +} + ohos_unittest("HDIPreparedModelV2_0Test") { module_out_path = module_output_path @@ -471,6 +579,30 @@ ohos_unittest("HDIPreparedModelV2_0Test") { ] } +ohos_unittest("HDIPreparedModelV2_1Test") { + module_out_path = module_output_path + + sources = [ "./v2_1/hdi_prepared_model/hdi_prepared_model_test.cpp" ] + sources += [ "../common/v2_1/mock_idevice.cpp" ] + sources += [ "../common/file_utils.cpp" ] + configs = [ ":module_private_config" ] + + deps = [ + "../../../frameworks/native/neural_network_core:libneural_network_core", + "../../../frameworks/native/neural_network_runtime:libneural_network_runtime", + "//third_party/googletest:gmock_main", + "//third_party/googletest:gtest_main", + ] + + external_deps = [ + "c_utils:utils", + "drivers_interface_nnrt:libnnrt_proxy_2.0", + "hilog:libhilog", + "hitrace:libhitracechain", + "mindspore:mindir", + ] +} + ohos_unittest("TransformV2_0Test") { module_out_path = module_output_path @@ -620,11 +752,17 @@ group("components_unittest") { ":ExecutorV2_0Test", ":HDIDeviceV1_0Test", ":HDIDeviceV2_0Test", + ":HDIDeviceV2_1Test", ":HDIPreparedModelV1_0Test", ":HDIPreparedModelV2_0Test", + ":HDIPreparedModelV2_1Test", ":InnerModelV1_0Test", ":InnerModelV2_0Test", ":MemoryManagerTest", + ":QuantParamsTest", + ":NNBackendTest", + ":NNCompiledCacheTest", + ":NNCompilerTest", ":NeuralNetworkRuntimeV1_0Test", ":NeuralNetworkRuntimeV2_0Test", ":NnTensorV1_0Test", diff --git a/test/unittest/components/nn_backend/nn_backend_test.cpp b/test/unittest/components/nn_backend/nn_backend_test.cpp new file mode 100644 index 0000000..b280303 --- /dev/null +++ b/test/unittest/components/nn_backend/nn_backend_test.cpp @@ -0,0 +1,716 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "nnbackend.h" +#include "device.h" +#include "interfaces/kits/c/neural_network_runtime/neural_network_runtime_type.h" +#include "backend_manager.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; + +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +class NNBackendTest : public testing::Test { +public: + NNBackendTest() = default; + ~NNBackendTest() = default; +}; + +class MockIDevice : public Device { +public: + MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&)); + MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&)); + MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr, + std::vector&)); + MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD4(PrepareModel, OH_NN_ReturnCode(std::shared_ptr, + const Buffer&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD4(PrepareModel, OH_NN_ReturnCode(const void*, + const Buffer&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD3(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD1(AllocateBuffer, void*(size_t)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*)); + MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&)); + MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t)); +}; + +/** + * @tc.name: nnbackendtest_construct_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_construct_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_NE(hdiDevice, nullptr); +} + +/** + * @tc.name: nnbackendtest_getbackendname_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_001, TestSize.Level0) +{ + size_t backendID = 1; + // std::shared_ptr device = std::make_shared(); + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + std::string backendName = "mock"; + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); +} + +/** + * @tc.name: nnbackendtest_getbackendname_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + std::string backendName = "mock"; + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) + // .WillRepeatedly(::testing::Return(OH_NN_SUCCESS)); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); +} + +// /** +// * @tc.name: nnbackendtest_getbackendname_003 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_003, TestSize.Level0) +// { +// size_t backendID = 1; +// std::shared_ptr device = std::make_shared(); + +// std::string deviceName = "mock"; +// EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) +// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); + +// std::unique_ptr hdiDevice = std::make_unique(device, backendID); +// EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(deviceName)); +// } + +/** + * @tc.name: nnbackendtest_getbackendname_005 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_005, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::string deviceName; + std::string backendName = "mock"; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); +} + +// /** +// * @tc.name: nnbackendtest_getbackendname_006 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+// * @tc.type: FUNC +// */ +// HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_006, TestSize.Level0) +// { +// size_t backendID = 1; +// std::shared_ptr device = std::make_shared(); + +// std::string deviceName; +// std::string backendName = "mock"; +// EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) +// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); + +// EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_)) +// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); + +// std::unique_ptr hdiDevice = std::make_unique(device, backendID); +// EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); +// } + +/** + * @tc.name: nnbackendtest_getbackendname_007 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_007, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::string deviceName; + std::string backendName = "mock"; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); +} + +/** + * @tc.name: nnbackendtest_getbackendname_008 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_008, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::string deviceName; + std::string backendName = "mock"; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->GetBackendName(backendName)); +} + +/** + * @tc.name: nnbackendtest_getgackendtype_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getgackendtype_001, TestSize.Level0) +{ + size_t backendID = 1; + // std::shared_ptr device = std::make_shared(); + + OH_NN_DeviceType backendName = OH_NN_OTHERS; + // EXPECT_CALL(*((MockIDevice *) device.get()), GetBackendType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendType(backendName)); +} + +/** + * @tc.name: nnbackendtest_getgackendtype_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getgackendtype_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + OH_NN_DeviceType backendName = OH_NN_OTHERS; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendType(backendName)); +} + +/** + * @tc.name: nnbackendtest_getgackendtype_003 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getgackendtype_003, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + OH_NN_DeviceType backendName = OH_NN_OTHERS; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->GetBackendType(backendName)); +} + +/** + * @tc.name: nnbackendtest_getbackendstatus_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendstatus_001, TestSize.Level0) +{ + size_t backendID = 1; + // std::shared_ptr device = std::make_shared(); + + DeviceStatus backendName = UNKNOWN; + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendStatus(backendName)); +} + +/** + * @tc.name: nnbackendtest_getbackendstatus_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendstatus_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + DeviceStatus backendName = UNKNOWN; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendStatus(backendName)); +} + +/** + * @tc.name: nnbackendtest_getbackendstatus_003 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getbackendstatus_003, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + DeviceStatus backendName = UNKNOWN; + EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->GetBackendStatus(backendName)); +} + +/** + * @tc.name: nnbackendtest_createcompiler_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_001, TestSize.Level0) +{ + size_t backendID = 1; + // std::shared_ptr device = std::make_shared(); + + Compilation backendName; + Compilation* compilation = &backendName; + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_NE(nullptr, hdiDevice->CreateCompiler(compilation)); +} + +/** + * @tc.name: nnbackendtest_createcompiler_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + Compilation backendName; + char a = 'a'; + backendName.offlineModelPath = &a; + char b = 'b'; + backendName.offlineModelBuffer.first = &b; + backendName.offlineModelBuffer.second = static_cast(0); + Compilation* compilation = &backendName; + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(nullptr, hdiDevice->CreateCompiler(compilation)); +} + +/** + * @tc.name: nnbackendtest_createcompiler_003 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +// HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_003, TestSize.Level0) +// { +// size_t backendID = 1; +// std::shared_ptr device = std::make_shared(); + +// Compilation backendName; +// char a = 'a'; +// backendName.nnModel = &a; +// Compilation* compilation = &backendName; + +// // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) +// // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + +// std::unique_ptr hdiDevice = std::make_unique(device, backendID); +// EXPECT_EQ(nullptr, hdiDevice->CreateCompiler(compilation)); +// } + +/** + * @tc.name: nnbackendtest_destroycompiler_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_destroycompiler_001, TestSize.Level0) +{ + size_t backendID = 1; + // std::shared_ptr device = std::make_shared(); + + // Compilation backendName; + // Compiler* compilation = backendName.compiler; + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyCompiler(nullptr)); +} + +/** + * @tc.name: nnbackendtest_destroycompiler_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_destroycompiler_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // Compilation backendName; + NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID); + // NNCompiler* nncompiler = &nncompiler; + + // Compiler* compilation = backendName.compiler; + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->DestroyCompiler(nncompiler)); +} + +/** + * @tc.name: nnbackendtest_CreateExecutor_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // Compilation backendName; + // Compiler* compilation = backendName.compiler; + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(nullptr)); +} + +/** + * @tc.name: nnbackendtest_CreateExecutor_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + Compilation backendName; + Compilation* compilation = &backendName; + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(compilation)); +} + +/** + * @tc.name: nnbackendtest_CreateExecutor_003 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_003, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // Compilation backendName; + // backendName.compiler + // Compilation* compilation = &backendName; + Compilation *compilation = new (std::nothrow) Compilation(); + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(compilation)); +} + +/** + * @tc.name: nnbackendtest_DestroyExecutor_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_DestroyExecutor_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // Executor backendName; + // Executor* executor = &backendName; + // Compilation *compilation = new (std::nothrow) Compilation(); + + // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) + // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyExecutor(nullptr)); +} + +/** + * @tc.name: nnbackendtest_DestroyExecutor_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +// HWTEST_F(NNBackendTest, nnbackendtest_DestroyExecutor_002, TestSize.Level0) +// { +// size_t backendID = 1; +// std::shared_ptr device = std::make_shared(); + +// Compilation *compilationImpl = new (std::nothrow) Compilation(); +// BackendManager& backendManager = BackendManager::GetInstance(); +// std::shared_ptr backend = backendManager.GetBackend(compilationImpl->backendID); +// Executor* executorImpl = backend->CreateExecutor(compilationImpl); + +// // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) +// // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); + +// std::unique_ptr hdiDevice = std::make_unique(device, backendID); +// EXPECT_NE(OH_NN_SUCCESS, hdiDevice->DestroyExecutor(executorImpl)); +// } + +/** + * @tc.name: nnbackendtest_createtensor_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_createtensor_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(nullptr, hdiDevice->CreateTensor(nullptr)); +} + +/** + * @tc.name: nnbackendtest_createtensor_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_createtensor_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + TensorDesc desc; + TensorDesc* tensorDesc = &desc; + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_NE(nullptr, hdiDevice->CreateTensor(tensorDesc)); +} + +/** + * @tc.name: nnbackendtest_destroytensor_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_destroytensor_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + std::unique_ptr hdiDevice = std::make_unique(device, backendID); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyTensor(nullptr)); +} + +/** + * @tc.name: nnbackendtest_destroytensor_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +// HWTEST_F(NNBackendTest, nnbackendtest_destroytensor_002, TestSize.Level0) +// { +// size_t backendID = 1; +// std::shared_ptr device = std::make_shared(); + +// NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); +// BackendManager& backendManager = BackendManager::GetInstance(); +// size_t deviceID = 1; +// std::shared_ptr backend = backendManager.GetBackend(deviceID); +// TensorDesc* descImpl = reinterpret_cast(tensorDesc); +// Tensor* tensorImpl = backend->CreateTensor(descImpl); + +// std::unique_ptr hdiDevice = std::make_unique(device, backendID); +// EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->DestroyTensor(tensorImpl)); +// } + +/** + * @tc.name: nnbackendtest_getdevice_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getdevice_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(nullptr, hdiDevice->GetDevice()); +} + +/** + * @tc.name: nnbackendtest_getsupportedoperation_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + std::shared_ptr model = nullptr; + std::vector ops; + // std::shared_ptr model = std::make_shared(); + // std::vector ops {true}; + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->GetSupportedOperation(model, ops)); +} + +/** + * @tc.name: nnbackendtest_getsupportedoperation_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_002, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::shared_ptr model = nullptr; + std::vector ops; + std::shared_ptr model = std::make_shared(); + // std::vector ops {true}; + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetSupportedOperation(model, ops)); +} + +/** + * @tc.name: nnbackendtest_getsupportedoperation_003 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_003, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::shared_ptr model = nullptr; + std::vector ops; + std::shared_ptr model = std::make_shared(); + // std::vector ops {true}; + + EXPECT_CALL(*((MockIDevice *) device.get()), GetSupportedOperation(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::Return(OH_NN_FAILED)); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetSupportedOperation(model, ops)); +} + +/** + * @tc.name: nnbackendtest_getsupportedoperation_004 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_004, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // std::shared_ptr model = nullptr; + std::vector ops; + std::shared_ptr model = std::make_shared(); + // std::vector ops {true}; + + EXPECT_CALL(*((MockIDevice *) device.get()), GetSupportedOperation(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::Return(OH_NN_SUCCESS)); + + std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); + EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetSupportedOperation(model, ops)); +} +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp b/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp new file mode 100644 index 0000000..0a97102 --- /dev/null +++ b/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "nncompiled_cache.h" +#include "device.h" +#include "interfaces/kits/c/neural_network_runtime/neural_network_runtime_type.h" +#include "common/utils.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; + +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +class NNCompiledCacheTest : public testing::Test { +public: + NNCompiledCacheTest() = default; + ~NNCompiledCacheTest() = default; +}; + +/** + * @tc.name: nncompiledcachetest_save_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_save_001, TestSize.Level0) +{ + NNCompiledCache nncompiledCache; + std::vector caches; + std::string m_cachePath = "a"; + uint32_t m_cacheVersion = 1; + + EXPECT_EQ(OH_NN_INVALID_PARAMETER, nncompiledCache.Save(caches, m_cachePath, m_cacheVersion)); +} + +/** + * @tc.name: nncompiledcachetest_restore_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_restore_001, TestSize.Level0) +{ + NNCompiledCache nncompiledCache; + std::string m_cachePath = "a"; + uint32_t m_cacheVersion = 1; + std::vector caches; + + EXPECT_EQ(OH_NN_INVALID_PARAMETER, nncompiledCache.Restore(m_cachePath, m_cacheVersion, caches)); +} + +/** + * @tc.name: nncompiledcachetest_setbackend_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_setbackend_001, TestSize.Level0) +{ + NNCompiledCache nncompiledCache; + size_t backendID = 1; + + EXPECT_EQ(OH_NN_INVALID_PARAMETER, nncompiledCache.SetBackend(backendID)); +} + +/** + * @tc.name: nncompiledcachetest_setmodelname_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_setmodelname_001, TestSize.Level0) +{ + NNCompiledCache nncompiledCache; + std::string m_modelName; +} + +/** + * @tc.name: nncompiledcachetest_generatecachefiles_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_generatecachefiles_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// std::vector caches; +// std::string m_cachePath = "a"; +// uint32_t m_cacheVersion = 1; + +// EXPECT_EQ(OH_NN_MEMORY_ERROR, nncompiledCache.GenerateCacheFiles(caches, m_cachePath, m_cacheVersion)); +// } + +// /** +// * @tc.name: nncompiledcachetest_generatecachemodel_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_generatecachemodel_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// std::vector caches; +// const size_t cacheNumber = caches.size(); +// uint32_t cacheSize = 1; +// std::unique_ptr cacheInfo = CreateUniquePtr(cacheSize); +// std::string m_cachePath = "a"; +// uint32_t m_cacheVersion = 1; + +// EXPECT_EQ(OH_NN_MEOH_NN_INVALID_PARAMETERMORY_ERROR, nncompiledCache.GenerateCacheModel(caches, cacheInfo, m_cachePath, m_cacheVersion)); +// } + +// /** +// * @tc.name: nncompiledcachetest_writecacheinfo_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_writecacheinfo_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// uint32_t infoCharNumber = cacheSize * sizeof(uint64_t); +// const size_t cacheNumber = caches.size(); +// uint32_t cacheSize = 1; +// std::unique_ptr cacheInfo = CreateUniquePtr(cacheSize); +// std::string m_cachePath = "a"; + +// EXPECT_EQ(OH_NN_INVALID_PARAMETER, nncompiledCache.WriteCacheInfo(infoCharNumber, cacheInfo, m_cachePath)); +// } + +// /** +// * @tc.name: nncompiledcachetest_checkcacheinfo_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_checkcacheinfo_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// NNCompiledCacheInfo cacheInfo; +// std::string cacheInfoPath = "cache_info.nncache"; + +// EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.CheckCacheInfo(cacheInfo, cacheInfoPath)); +// } + +// /** +// * @tc.name: nncompiledcachetest_readcachemodelfile_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_readcachemodelfile_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// std::string cacheModelPath = ".nncache"; +// OHOS::NeuralNetworkRuntime::Buffer modelBuffer; + +// EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.ReadCacheModelFile(cacheModelPath, modelBuffer)); +// } + +// /** +// * @tc.name: nncompiledcachetest_getcrc16_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_getcrc16_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// std::vector caches; +// std::string cacheModelPath = ".nncache"; +// OHOS::NeuralNetworkRuntime::Buffer modelBuffer; + +// EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.GetCrc16(static_cast(caches[i].data), caches[i].length)); +// } + +// /** +// * @tc.name: nncompiledcachetest_getcachefilelength_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_getcachefilelength_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// std::string filePath = ".nncache"; +// std::ifstream ifs(filePath.c_str(), std::ios::in | std::ios::binary); +// int fsize{-1}; + +// EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.GetCacheFileLength(ifs, fsize)); +// } + +// /** +// * @tc.name: nncompiledcachetest_verifycachepath_001 +// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. +// * @tc.type: FUNC +// */ +// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_verifycachepath_001, TestSize.Level0) +// { +// NNCompiledCache nncompiledCache; +// char path[PATH_MAX]; + +// EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.VerifyCachePath(path)); +// } + + +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/components/nn_compiler/nn_compiler_test.cpp b/test/unittest/components/nn_compiler/nn_compiler_test.cpp new file mode 100644 index 0000000..934fa26 --- /dev/null +++ b/test/unittest/components/nn_compiler/nn_compiler_test.cpp @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "nnbackend.h" +#include "nncompiler.h" +#include "device.h" +#include "interfaces/kits/c/neural_network_runtime/neural_network_runtime_type.h" +#include "common/utils.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; + +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +class NNCompilerTest : public testing::Test { +public: + NNCompilerTest() = default; + ~NNCompilerTest() = default; +}; + +class MockIDevice : public Device { +public: + MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&)); + MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&)); + MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&)); + MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr, + std::vector&)); + MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&)); + MOCK_METHOD4(PrepareModel, OH_NN_ReturnCode(std::shared_ptr, + const Buffer&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD4(PrepareModel, OH_NN_ReturnCode(const void*, + const Buffer&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD3(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector&, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr, + const ModelConfig&, + std::shared_ptr&)); + MOCK_METHOD1(AllocateBuffer, void*(size_t)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr)); + MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*)); + MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&)); + MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t)); +}; + +/** + * @tc.name: nncompilertest_construct_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(NNCompilerTest, nncompilertest_construct_001, TestSize.Level0) +{ + size_t backendID = 1; + std::shared_ptr device = std::make_shared(); + + // Compilation backendName; + NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID); + EXPECT_NE(nullptr, nncompiler); + + // NNCompiler* nncompiler_o; + // EXPECT_EQ(nullptr, nncompiler_o); + + // Compilation backendName; + // Compilation* compilation = &backendName; + // NNCompiler* nncompiler_t = new (std::nothrow) NNCompiler(compilation->nnModel, device, backendID); + // EXPECT_EQ(nullptr, nncompiler_t); +} + + +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/components/quant_param/quant_param_test.cpp b/test/unittest/components/quant_param/quant_param_test.cpp new file mode 100644 index 0000000..6eb9b31 --- /dev/null +++ b/test/unittest/components/quant_param/quant_param_test.cpp @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "quant_param.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; + +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +class QuantParamsTest : public testing::Test { +public: + QuantParamsTest() = default; + ~QuantParamsTest() = default; +}; + +/** + * @tc.name: quantparamstest_setscales_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setscales_001, TestSize.Level0) +{ + QuantParams quantParams; + std::vector scales = {1, 2, 3, 4}; + quantParams.SetScales(scales); + EXPECT_EQ(false, quantParams.GetScales().empty()); +} + +/** + * @tc.name: quantparamstest_setscales_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setscales_002, TestSize.Level0) +{ + QuantParams quantParams; + EXPECT_EQ(true, quantParams.GetScales().empty()); +} + +/** + * @tc.name: quantparamstest_setzeropoints_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setzeropoints_001, TestSize.Level0) +{ + QuantParams quantParams; + std::vector zeroPoints = {1, 2, 3, 4}; + quantParams.SetZeroPoints(zeroPoints); + EXPECT_EQ(false, quantParams.GetZeroPoints().empty()); +} + +/** + * @tc.name: quantparamstest_setzeropoints_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setzeropoints_002, TestSize.Level0) +{ + QuantParams quantParams; + EXPECT_EQ(true, quantParams.GetZeroPoints().empty()); +} + +/** + * @tc.name: quantparamstest_setnumbits_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setnumbits_001, TestSize.Level0) +{ + QuantParams quantParams; + std::vector numBits = {1, 2, 3, 4}; + quantParams.SetNumBits(numBits); + EXPECT_EQ(false, quantParams.GetNumBits().empty()); +} + +/** + * @tc.name: quantparamstest_setnumbits_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_setnumbits_002, TestSize.Level0) +{ + QuantParams quantParams; + EXPECT_EQ(true, quantParams.GetNumBits().empty()); +} + +/** + * @tc.name: quantparamstest_copytocompat_001 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
+ * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_copytocompat_001, TestSize.Level0) +{ + QuantParams quantParams; + std::vector scales = {1, 2}; + quantParams.SetScales(scales); + std::vector zeroPoints = {1, 2, 3}; + quantParams.SetZeroPoints(zeroPoints); + std::vector numBits = {1, 2, 3, 4}; + quantParams.SetNumBits(numBits); + std::vector compatQuantParams; + EXPECT_EQ(OH_NN_INVALID_PARAMETER, quantParams.CopyToCompat(compatQuantParams)); +} + +/** + * @tc.name: quantparamstest_copytocompat_002 + * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. + * @tc.type: FUNC + */ +HWTEST_F(QuantParamsTest, quantparamstest_copytocompat_002, TestSize.Level0) +{ + QuantParams quantParams; + std::vector scales = {1, 2, 3, 4}; + quantParams.SetScales(scales); + std::vector zeroPoints = {1, 2, 3, 4}; + quantParams.SetZeroPoints(zeroPoints); + std::vector numBits = {1, 2, 3, 4}; + quantParams.SetNumBits(numBits); + std::vector compatQuantParams; + EXPECT_EQ(OH_NN_SUCCESS, quantParams.CopyToCompat(compatQuantParams)); +} + +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/components/v2_1/hdi_device/hdi_device_test.cpp b/test/unittest/components/v2_1/hdi_device/hdi_device_test.cpp new file mode 100644 index 0000000..00d2184 --- /dev/null +++ b/test/unittest/components/v2_1/hdi_device/hdi_device_test.cpp @@ -0,0 +1,915 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "hdi_device_v2_1.h" +#include "test/unittest/common/v2_1/mock_idevice.h" +#include "test/unittest/common/file_utils.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; +namespace mindspore { +namespace lite { +OHOS::HDI::Nnrt::V2_1::Model* MindIR_LiteGraph_To_Model(const LiteGraph* lite_graph, + const OHOS::HDI::Nnrt::V2_1::SharedBuffer& buffer) +{ + return new (std::nothrow) OHOS::HDI::Nnrt::V2_1::Model(); +} + +void MindIR_Model_Destroy(OHOS::HDI::Nnrt::V2_1::Model** model) +{ + if ((model != nullptr) && (*model != nullptr)) { + delete *model; + *model = nullptr; + } +} + +size_t MindIR_LiteGraph_GetConstTensorSize(const mindspore::lite::LiteGraph* lite_graph) +{ + return 1; +} +} +} + +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +static const int DATA_VALUE = 1; +static const int DATA_NUM = 36; +static const int DIM_NUM = 3; + +void BuildLiteGraph(std::shared_ptr& model) +{ + model->name_ = "testGraph"; + model->input_indices_ = {0}; + model->output_indices_ = {1}; + model->all_tensors_ = {nullptr}; + const std::vector quant_params {}; + const std::vector data(DATA_NUM, DATA_VALUE); + const std::vector dim = {DIM_NUM, DIM_NUM}; + + for (size_t indexInput = 0; indexInput < model->input_indices_.size(); ++indexInput) { + model->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create()); + } + + for (size_t indexOutput = 0; indexOutput < model->output_indices_.size(); ++indexOutput) { + model->all_tensors_.emplace_back(mindspore::lite::MindIR_Tensor_Create()); + } + + mindspore::lite::LiteGraph::Node node; + node.name_ = "testNode"; + mindspore::lite::LiteGraph::Node* testNode = &node; + model->all_nodes_.emplace_back(testNode); + model->all_nodes_.emplace_back(testNode); +} + +class HDIDeviceTest : public testing::Test { +protected: + void GetBuffer(void*& buffer, size_t length); + OH_NN_ReturnCode PrepareModel(int32_t allocBufferType, int32_t prepareType); +}; + +void HDIDeviceTest::GetBuffer(void*& buffer, size_t length) +{ + std::string data = "ABCD"; + const size_t dataLength = 100; + data.resize(dataLength, '+'); + + std::string filename = "/data/log/memory-001.dat"; + FileUtils fileUtils(filename); + fileUtils.WriteFile(data); + + int fd = open(filename.c_str(), O_RDWR); + EXPECT_NE(fd, -1); + + const auto &memoryManager = MemoryManager::GetInstance(); + buffer = memoryManager->MapMemory(fd, length); + EXPECT_NE(buffer, nullptr); + + const char* result = static_cast(buffer); + int index = 0; + EXPECT_EQ('A', result[index++]); + EXPECT_EQ('B', result[index++]); + EXPECT_EQ('C', result[index++]); + EXPECT_EQ('D', result[index++]); + close(fd); +} + +OH_NN_ReturnCode HDIDeviceTest::PrepareModel(int32_t allocBufferType, int32_t prepareType) +{ + std::shared_ptr model = std::make_shared(); + OHOS::sptr sp = OHOS::sptr(new (std::nothrow) V2_1::MockIDevice()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr hdiDevice = std::make_unique(sp); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer buffer {1, 1, 0, 1}; + EXPECT_CALL(*sp, AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(buffer), ::testing::Return(allocBufferType))); + + std::shared_ptr preparedModel; + const int position = 2; + OHOS::sptr iPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_CALL(*sp, PrepareModel(::testing::_, 
::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee(iPreparedModel), + ::testing::Return(prepareType))); + + ModelConfig config; + Buffer quantBuffer; + OH_NN_ReturnCode result = hdiDevice->PrepareModel(model, quantBuffer, config, preparedModel); + return result; +} + +/* * + * @tc.name: hdidevice_constructor_001 + * @tc.desc: Verify the Constructor function return object success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_constructor_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + EXPECT_NE(device, nullptr); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); +} + +/* * + * @tc.name: hdidevice_getdevicename_001 + * @tc.desc: Verify the GetDeviceName function validate device name success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicename_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + std::string deviceName = "MockDevice"; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(HDF_SUCCESS))); + + const std::string expectDeviceName = "MockDevice"; + std::string newDeviceName = ""; + OH_NN_ReturnCode result = hdiDevice->GetDeviceName(newDeviceName); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectDeviceName, newDeviceName); +} + +/* * + * @tc.name: hdidevice_getdevicename_002 + * @tc.desc: Verify the GetDeviceName function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicename_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + std::string deviceName = "MockDevice"; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->GetDeviceName(deviceName); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_getvendorname_001 + * @tc.desc: Verify the GetVendorName function validate vendor name success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getvendorname_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + std::string vendorName = "MockVendor"; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetVendorName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(vendorName), ::testing::Return(HDF_SUCCESS))); + + const std::string expectDeviceName = "MockVendor"; + std::string newVendorName = ""; + OH_NN_ReturnCode result = hdiDevice->GetVendorName(newVendorName); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectDeviceName, newVendorName); +} + +/* * + * @tc.name: hdidevice_getvendorname_002 + * @tc.desc: Verify the GetVendorName function return unavailable device. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getvendorname_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + std::string vendorName = "MockVendor"; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetVendorName(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(vendorName), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->GetVendorName(vendorName); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_getdevicetype_001 + * @tc.desc: Verify the GetDeviceType function validate device type success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicetype_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + V2_1::DeviceType iDeviceType = V2_1::DeviceType::CPU; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceType(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(iDeviceType), ::testing::Return(HDF_SUCCESS))); + + OH_NN_DeviceType expectDeviceType = OH_NN_CPU; + OH_NN_DeviceType newDeviceType = OH_NN_CPU; + OH_NN_ReturnCode result = hdiDevice->GetDeviceType(newDeviceType); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectDeviceType, newDeviceType); +} + +/* * + * @tc.name: hdidevice_getdevicetype_002 + * @tc.desc: Verify the GetDeviceType function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicetype_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + OH_NN_DeviceType deviceType = OH_NN_CPU; + V2_1::DeviceType iDeviceType = V2_1::DeviceType::CPU; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceType(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(iDeviceType), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->GetDeviceType(deviceType); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_getdevicestatus_001 + * @tc.desc: Verify the GetDeviceStatus function validate device status success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicestatus_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::DeviceStatus iDeviceStatus = V2_1::DeviceStatus::AVAILABLE; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceStatus(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(iDeviceStatus), ::testing::Return(HDF_SUCCESS))); + + const DeviceStatus expectDeviceStatus = AVAILABLE; + DeviceStatus newDeviceStatus = AVAILABLE; + OH_NN_ReturnCode result = hdiDevice->GetDeviceStatus(newDeviceStatus); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectDeviceStatus, newDeviceStatus); +} + +/* * + * @tc.name: hdidevice_getdevicestatus_002 + * @tc.desc: Verify the GetDeviceStatus function return unavailable device. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getdevicestatus_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + DeviceStatus deviceStatus = AVAILABLE; + V2_1::DeviceStatus iDeviceStatus = V2_1::DeviceStatus::AVAILABLE; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetDeviceStatus(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(iDeviceStatus), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->GetDeviceStatus(deviceStatus); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_getsupportedoperation_001 + * @tc.desc: Verify the GetSupportedOperation function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getsupportedoperation_001, TestSize.Level0) +{ + std::vector ops {true}; + std::shared_ptr model = std::make_shared(); + EXPECT_NE(nullptr, model); + BuildLiteGraph(model); + + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer buffer {1, 1, 0, 1}; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(buffer), ::testing::Return(HDF_SUCCESS))); + + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetSupportedOperation(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(ops), ::testing::Return(HDF_SUCCESS))); + + std::vector newOps {true}; + const std::vector expectOps {true}; + OH_NN_ReturnCode result = hdiDevice->GetSupportedOperation(model, newOps); + EXPECT_EQ(OH_NN_FAILED, result); + auto expectOpsSize = expectOps.size(); + for (size_t i = 0; i < expectOpsSize; ++i) { + EXPECT_EQ(expectOps[i], newOps[i]); + } +} + +/* * + * @tc.name: hdidevice_getsupportedoperation_002 + * @tc.desc: Verify the GetSupportedOperation function return failed in case of allocate buffer failure. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getsupportedoperation_002, TestSize.Level0) +{ + std::vector ops; + std::shared_ptr model = std::make_shared(); + EXPECT_NE(nullptr, model); + BuildLiteGraph(model); + + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer buffer {1, 1, 0, 1}; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(buffer), ::testing::Return(HDF_FAILURE))); + + OH_NN_ReturnCode result = hdiDevice->GetSupportedOperation(model, ops); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_getsupportedoperation_003 + * @tc.desc: Verify the GetSupportedOperation function return nullptr. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getsupportedoperation_003, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + std::shared_ptr model = nullptr; + std::vector ops; + OH_NN_ReturnCode result = hdiDevice->GetSupportedOperation(model, ops); + EXPECT_EQ(OH_NN_NULL_PTR, result); +} + +/* * + * @tc.name: hdidevice_getsupportedoperation_004 + * @tc.desc: Verify the GetSupportedOperation function return unavalidable device. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_getsupportedoperation_004, TestSize.Level0) +{ + std::vector ops {true}; + std::shared_ptr model = std::make_shared(); + EXPECT_NE(nullptr, model); + BuildLiteGraph(model); + + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer buffer {2, 1, 0, 1}; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(buffer), ::testing::Return(HDF_SUCCESS))); + + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), GetSupportedOperation(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(ops), ::testing::Return(HDF_FAILURE))); + + std::vector newOps {true}; + OH_NN_ReturnCode result = hdiDevice->GetSupportedOperation(model, newOps); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_isfloat16precisionsupported_001 + * @tc.desc: Verify the IsFloat16PrecisionSupported function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isfloat16precisionsupported_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsFloat16PrecisionSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_SUCCESS))); + OH_NN_ReturnCode result = hdiDevice->IsFloat16PrecisionSupported(isSupported); + EXPECT_EQ(OH_NN_SUCCESS, result); +} + +/* * + * @tc.name: hdidevice_isfloat16precisionsupported_002 + * @tc.desc: Verify the IsFloat16PrecisionSupported function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isfloat16precisionsupported_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsFloat16PrecisionSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->IsFloat16PrecisionSupported(isSupported); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_isperformancemodesupported_001 + * @tc.desc: Verify the IsPerformanceModeSupported function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isperformancemodesupported_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsPerformanceModeSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_SUCCESS))); + + bool newIsSupported = false; + const bool expectIsSupported = false; + OH_NN_ReturnCode result = hdiDevice->IsPerformanceModeSupported(newIsSupported); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectIsSupported, newIsSupported); +} + +/* * + * @tc.name: hdidevice_isperformancemodesupported_002 + * @tc.desc: Verify the IsPerformanceModeSupported function return unavailable device. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isperformancemodesupported_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsPerformanceModeSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->IsPerformanceModeSupported(isSupported); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_isprioritysupported_001 + * @tc.desc: Verify the IsPrioritySupported function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isprioritysupported_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsPrioritySupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_SUCCESS))); + + bool newIsSupported = false; + bool expectIsSupported = false; + OH_NN_ReturnCode result = hdiDevice->IsPrioritySupported(newIsSupported); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(newIsSupported, expectIsSupported); +} + +/* * + * @tc.name: hdidevice_isprioritysupported_002 + * @tc.desc: Verify the IsPrioritySupported function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isprioritysupported_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsPrioritySupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->IsPrioritySupported(isSupported); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_isdynamicinputsupported_001 + * @tc.desc: Verify the IsDynamicInputSupported function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isdynamicinputsupported_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsDynamicInputSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_SUCCESS))); + + bool newIsSupported = false; + bool expectIsSupported = false; + OH_NN_ReturnCode result = hdiDevice->IsDynamicInputSupported(newIsSupported); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(newIsSupported, expectIsSupported); +} + +/* * + * @tc.name: hdidevice_isdynamicinputsupported_002 + * @tc.desc: Verify the IsDynamicInputSupported function return unavailable device. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_isdynamicinputsupported_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsDynamicInputSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->IsDynamicInputSupported(isSupported); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_isdynamicinputsupported_001 + * @tc.desc: Verify the IsModelCacheSupported function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_ismodelcachesupported_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsModelCacheSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_SUCCESS))); + + bool newIsSupported = false; + bool expectIsSupported = false; + OH_NN_ReturnCode result = hdiDevice->IsModelCacheSupported(newIsSupported); + EXPECT_EQ(OH_NN_SUCCESS, result); + EXPECT_EQ(expectIsSupported, newIsSupported); +} + +/* * + * @tc.name: hdidevice_isdynamicinputsupported_002 + * @tc.desc: Verify the IsModelCacheSupported function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_ismodelcachesupported_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + bool isSupported = false; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), IsModelCacheSupported(::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(isSupported), ::testing::Return(HDF_FAILURE))); + OH_NN_ReturnCode result = hdiDevice->IsModelCacheSupported(isSupported); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); +} + +/* * + * @tc.name: hdidevice_preparemodel_001 + * @tc.desc: Verify the PrepareModel function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodel_001, TestSize.Level0) +{ + int32_t allocBufferType = HDF_SUCCESS; + int32_t prepareType = HDF_SUCCESS; + OH_NN_ReturnCode result = PrepareModel(allocBufferType, prepareType); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_preparemodel_002 + * @tc.desc: Verify the PrepareModel function return invalid parameter. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodel_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + std::shared_ptr model = nullptr; + ModelConfig config; + Buffer quantBuffer; + std::shared_ptr preparedModel; + OH_NN_ReturnCode result = hdiDevice->PrepareModel(model, quantBuffer, config, preparedModel); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, result); +} + +/* * + * @tc.name: hdidevice_preparemodel_003 + * @tc.desc: Verify the PrepareModel function return failed. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodel_003, TestSize.Level0) +{ + int32_t allocBufferType = HDF_SUCCESS; + int32_t prepareType = HDF_FAILURE; + OH_NN_ReturnCode result = PrepareModel(allocBufferType, prepareType); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_preparemodel_004 + * @tc.desc: Verify the PrepareModel function return failed. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodel_004, TestSize.Level0) +{ + int32_t allocBufferType = HDF_FAILURE; + int32_t prepareType = HDF_FAILURE; + OH_NN_ReturnCode result = PrepareModel(allocBufferType, prepareType); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_preparemodelfrommodelcache_001 + * @tc.desc: Verify the PrepareModelFromModelCache function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodelfrommodelcache_001, TestSize.Level0) +{ + size_t length = 100; + void *buffer = nullptr; + GetBuffer(buffer, length); + + std::vector modelCache = { { buffer, 100 } }; + ModelConfig config; + + OHOS::sptr sp = OHOS::sptr(new (std::nothrow) V2_1::MockIDevice()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr hdiDevice = std::make_unique(sp); + EXPECT_NE(hdiDevice, nullptr); + + std::shared_ptr preparedModel; + + OHOS::sptr iPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_CALL(*sp, PrepareModelFromModelCache(::testing::_, ::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<2>(iPreparedModel), ::testing::Return(HDF_SUCCESS))); + + OH_NN_ReturnCode result = hdiDevice->PrepareModelFromModelCache(modelCache, config, preparedModel); + const auto &memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); + EXPECT_EQ(OH_NN_SUCCESS, result); +} + +/* * + * @tc.name: hdidevice_preparemodelfrommodelcache_002 + * @tc.desc: Verify the PrepareModelFromModelCache function return unavailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodelfrommodelcache_002, TestSize.Level0) +{ + size_t length = 100; + void *buffer = nullptr; + GetBuffer(buffer, length); + + OHOS::sptr sp = OHOS::sptr(new (std::nothrow) V2_1::MockIDevice()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr hdiDevice = std::make_unique(sp); + EXPECT_NE(hdiDevice, nullptr); + + std::vector modelCache = { { buffer, 100 } }; + ModelConfig config; + OHOS::sptr preModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(preModel, nullptr); + + std::shared_ptr preparedModel = std::make_shared(preModel); + + OHOS::sptr iPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel); + EXPECT_CALL(*sp, PrepareModelFromModelCache(::testing::_, ::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<2>(iPreparedModel), ::testing::Return(HDF_FAILURE))); + + OH_NN_ReturnCode result = hdiDevice->PrepareModelFromModelCache(modelCache, config, preparedModel); + EXPECT_EQ(OH_NN_FAILED, result); +} + +/* * + * @tc.name: hdidevice_preparemodelfrommodelcache_003 + * @tc.desc: Verify the PrepareModelFromModelCache function return nullptr. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_preparemodelfrommodelcache_003, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + std::vector modelCache = { { nullptr, 0 } }; + ModelConfig config; + std::shared_ptr preparedModel; + OH_NN_ReturnCode result = hdiDevice->PrepareModelFromModelCache(modelCache, config, preparedModel); + EXPECT_EQ(OH_NN_NULL_PTR, result); +} + +/* * + * @tc.name: hdidevice_allocatebuffer_001 + * @tc.desc: Verify the AllocateBuffer function return nullptr. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_allocatebuffer_001, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer buffer; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(buffer), ::testing::Return(HDF_FAILURE))); + + size_t length = 8; + void *result = hdiDevice->AllocateBuffer(length); + EXPECT_EQ(nullptr, result); + hdiDevice->ReleaseBuffer(result); +} + +/* * + * @tc.name: hdidevice_allocatebuffer_002 + * @tc.desc: Verify the AllocateBuffer function return nullptr and HDF_FAILURE. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_allocatebuffer_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + size_t length = 8; + void *result = hdiDevice->AllocateBuffer(length); + EXPECT_EQ(nullptr, result); + hdiDevice->ReleaseBuffer(result); +} + +/* * + * @tc.name: hdidevice_allocatebuffer_003 + * @tc.desc: Verify the AllocateBuffer function return nullptr in case of 0 size. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_allocatebuffer_003, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + size_t length = 0; + void *result = hdiDevice->AllocateBuffer(length); + EXPECT_EQ(nullptr, result); +} + +/* * + * @tc.name: hdidevice_releasebuffer_001 + * @tc.desc: Verify the ReleaseBuffer function validate buffer success. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_releasebuffer_001, TestSize.Level0) +{ + size_t length = 100; + void *buffer = nullptr; + GetBuffer(buffer, length); + + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), ReleaseBuffer(::testing::_)) + .WillRepeatedly(::testing::Return(HDF_SUCCESS)); + + EXPECT_NE(hdiDevice, nullptr); + hdiDevice->ReleaseBuffer(buffer); + const auto &memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); +} + +/* * + * @tc.name: hdidevice_releasebuffer_002 + * @tc.desc: Verify the ReleaseBuffer function validate AllocateBuffer return nullptr. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_releasebuffer_002, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + V2_1::SharedBuffer sharedbuffer; + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), AllocateBuffer(::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(sharedbuffer), ::testing::Return(HDF_FAILURE))); + + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), ReleaseBuffer(::testing::_)) + .WillRepeatedly(::testing::Return(HDF_FAILURE)); + + size_t length = 8; + void *buffer = hdiDevice->AllocateBuffer(length); + hdiDevice->ReleaseBuffer(buffer); +} + +/* * + * @tc.name: hdidevice_releasebuffer_003 + * @tc.desc: Verify the ReleaseBuffer function validate param buffer is nullptr. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_releasebuffer_003, TestSize.Level0) +{ + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + void *buffer = nullptr; + hdiDevice->ReleaseBuffer(buffer); +} + +/* * + * @tc.name: hdidevice_releasebuffer_004 + * @tc.desc: Verify the ReleaseBuffer function validate invalid buffer. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_releasebuffer_004, TestSize.Level0) +{ + const size_t length = 100; + auto* buffer = new(std::nothrow) char[length]; + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + hdiDevice->ReleaseBuffer(buffer); + delete[] buffer; + buffer = nullptr; +} + +/* * + * @tc.name: hdidevice_releasebuffer_005 + * @tc.desc: Verify the ReleaseBuffer function validate moc object's ReleaseBuffer return failure. + * @tc.type: FUNC + */ +HWTEST_F(HDIDeviceTest, hdidevice_releasebuffer_005, TestSize.Level0) +{ + size_t length = 100; + void *buffer = nullptr; + GetBuffer(buffer, length); + + OHOS::sptr device = V2_1::INnrtDevice::Get(false); + std::unique_ptr hdiDevice = std::make_unique(device); + EXPECT_NE(hdiDevice, nullptr); + + EXPECT_CALL(*((V2_1::MockIDevice *)device.GetRefPtr()), ReleaseBuffer(::testing::_)) + .WillRepeatedly(::testing::Return(HDF_FAILURE)); + + hdiDevice->ReleaseBuffer(buffer); + const auto &memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); +} +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS diff --git a/test/unittest/components/v2_1/hdi_prepared_model/hdi_prepared_model_test.cpp b/test/unittest/components/v2_1/hdi_prepared_model/hdi_prepared_model_test.cpp new file mode 100644 index 0000000..82a6c27 --- /dev/null +++ b/test/unittest/components/v2_1/hdi_prepared_model/hdi_prepared_model_test.cpp @@ -0,0 +1,341 @@ +/* + * Copyright (c) 2022 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#include +#include + +#include "common/log.h" +#include "hdi_prepared_model_v2_1.h" +#include "memory_manager.h" +#include "transform.h" +#include "test/unittest/common/v2_1/mock_idevice.h" +#include "test/unittest/common/file_utils.h" + +using namespace testing; +using namespace testing::ext; +using namespace OHOS::NeuralNetworkRuntime; +namespace OHOS { +namespace NeuralNetworkRuntime { +namespace UnitTest { +class HDIPreparedModelTest : public testing::Test { +protected: + void GetBuffer(void*& buffer, size_t length); + void InitTensor(std::vector& inputs, void* buffer, size_t length); + OH_NN_ReturnCode Run(std::vector& inputs); +}; + +void HDIPreparedModelTest::GetBuffer(void*& buffer, size_t length) +{ + std::string data = "ABCD"; + const size_t dataLength = 100; + data.resize(dataLength, '-'); + + std::string filename = "/data/log/memory-001.dat"; + FileUtils fileUtils(filename); + fileUtils.WriteFile(data); + + int fd = open(filename.c_str(), O_RDWR); + EXPECT_NE(-1, fd); + + const auto& memoryManager = MemoryManager::GetInstance(); + buffer = memoryManager->MapMemory(fd, length); + close(fd); +} + +void HDIPreparedModelTest::InitTensor(std::vector& inputs, void* buffer, size_t length) +{ + IOTensor inputTensor; + inputTensor.dataType = OH_NN_INT8; + inputTensor.dataType = OH_NN_INT8; + inputTensor.format = OH_NN_FORMAT_NCHW; + inputTensor.data = buffer; + inputTensor.length = length; + inputs.emplace_back(std::move(inputTensor)); +} + +OH_NN_ReturnCode HDIPreparedModelTest::Run(std::vector& inputs) +{ + const int vvPosition = 2; + std::vector outputs; + std::vector> outputsDims {{0}}; + std::vector isOutputBufferEnough {}; + + OHOS::sptr sp = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr preparedModel = std::make_unique(sp); + EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_)) + .WillRepeatedly(::testing::DoAll( + ::testing::SetArgReferee(outputsDims), + ::testing::Return(HDF_SUCCESS)) + ); + + OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough); + return result; +} + +/** + * @tc.name: hidpreparedmodel_constructor_001 + * @tc.desc: Verify the Constructor function validate constructor success. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_constructor_001, TestSize.Level0) +{ + OHOS::sptr hdiPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(hdiPreparedModel, nullptr); + + std::unique_ptr preparedModel = std::make_unique(hdiPreparedModel); + EXPECT_NE(preparedModel, nullptr); +} + +/** + * @tc.name: hidpreparedmodel_exportmodelcache_001 + * @tc.desc: Verify the ExportModelCache function return memory error. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_001, TestSize.Level0) +{ + std::vector bufferVect = {{100, 100, 0, 100}}; + OHOS::sptr hdiPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + std::unique_ptr preparedModel = std::make_unique(hdiPreparedModel); + std::vector modelCache; + EXPECT_CALL(*((V2_1::MockIPreparedModel*)hdiPreparedModel.GetRefPtr()), + ExportModelCache(::testing::_)) + .WillRepeatedly( + ::testing::DoAll( + ::testing::SetArgReferee<0>(bufferVect), + ::testing::Return(HDF_SUCCESS) + ) + ); + + OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache); + EXPECT_EQ(OH_NN_MEMORY_ERROR, result); +} + +/** + * @tc.name: hidpreparedmodel_exportmodelcache_002 + * @tc.desc: Verify the ExportModelCache function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_002, TestSize.Level0) +{ + std::vector bufferVect; + OHOS::sptr mockPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(mockPreparedModel, nullptr); + + std::unique_ptr preparedModel = std::make_unique(mockPreparedModel); + std::vector modelCache; + EXPECT_CALL(*((V2_1::MockIPreparedModel*)mockPreparedModel.GetRefPtr()), + ExportModelCache(::testing::_)) + .WillRepeatedly( + ::testing::DoAll( + ::testing::SetArgReferee<0>(bufferVect), + ::testing::Return(HDF_SUCCESS) + ) + ); + + OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache); + EXPECT_EQ(OH_NN_SUCCESS, result); +} + +/** + * @tc.name: hidpreparedmodel_exportmodelcache_003 + * @tc.desc: Verify the ExportModelCache function return invalid parameter. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_003, TestSize.Level0) +{ + OHOS::sptr hdiPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(hdiPreparedModel, nullptr); + + std::unique_ptr preparedModel = std::make_unique(hdiPreparedModel); + std::vector modelCache; + OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache); + EXPECT_EQ(OH_NN_SUCCESS, result); +} + +/** + * @tc.name: hidpreparedmodel_exportmodelcache_004 + * @tc.desc: Verify the ExportModelCache function return unvailable device. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_004, TestSize.Level0) +{ + std::vector bufferVect = {{100, 100, 0, 100}}; + OHOS::sptr mockPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(mockPreparedModel, nullptr); + + std::unique_ptr preparedModel = std::make_unique(mockPreparedModel); + std::vector modelCache; + EXPECT_CALL(*((V2_1::MockIPreparedModel*)mockPreparedModel.GetRefPtr()), + ExportModelCache(::testing::_)) + .WillRepeatedly( + ::testing::DoAll( + ::testing::SetArgReferee<0>(bufferVect), + ::testing::Return(HDF_FAILURE) + ) + ); + + OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache); + EXPECT_EQ(OH_NN_SAVE_CACHE_EXCEPTION, result); +} + +/** + * @tc.name: hidpreparedmodel_run_001 + * @tc.desc: Verify the Run function return invalid parameter. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_001, TestSize.Level0) +{ + IOTensor inputTensor; + inputTensor.dataType = OH_NN_INT8; + + IOTensor outputTensor; + outputTensor.dataType = OH_NN_INT8; + std::vector inputs; + inputs.emplace_back(std::move(inputTensor)); + std::vector outputs; + + std::vector iOutputTensors; + V2_1::IOTensor iTensor; + iOutputTensors.emplace_back(iTensor); + std::vector> outputsDims {{0}}; + std::vector isOutputBufferEnough {}; + + std::shared_ptr sp = std::make_shared(); + OHOS::sptr hdiPreparedModel = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(hdiPreparedModel, nullptr); + + std::unique_ptr preparedModel = std::make_unique(hdiPreparedModel); + OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, result); +} + +/** + * @tc.name: hidpreparedmodel_run_002 + * @tc.desc: Verify the Run function return success. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_002, TestSize.Level0) +{ + const size_t length = 100; + void* buffer = nullptr; + GetBuffer(buffer, length); + + std::vector inputs; + std::vector outputs; + InitTensor(inputs, buffer, length); + + OH_NN_ReturnCode result = Run(inputs); + EXPECT_EQ(OH_NN_SUCCESS, result); + const auto& memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); +} + +/** + * @tc.name: hidpreparedmodel_run_003 + * @tc.desc: Verify the Run function return unavailable device in case of run failure. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_003, TestSize.Level0) +{ + const size_t length = 100; + void* buffer = nullptr; + GetBuffer(buffer, length); + + std::vector inputs; + std::vector outputs; + InitTensor(inputs, buffer, length); + + std::vector> outputsDims {}; + std::vector isOutputBufferEnough {}; + + OHOS::sptr sp = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr preparedModel = std::make_unique(sp); + + EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_)) + .WillRepeatedly( + ::testing::DoAll( + ::testing::SetArgReferee<2>(outputsDims), + ::testing::Return(HDF_FAILURE) + ) + ); + + OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough); + EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result); + const auto& memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); +} + +/** + * @tc.name: hidpreparedmodel_run_004 + * @tc.desc: Verify the Run function return invalid parameter. + * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_004, TestSize.Level0) +{ + std::vector inputs; + InitTensor(inputs, nullptr, 0); + OH_NN_ReturnCode result = Run(inputs); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, result); +} + +/** + * @tc.name: hidpreparedmodel_run_005 + * @tc.desc: Verify the Run function return invalid parameter in case of output invalid. 
+ * @tc.type: FUNC + */ +HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_005, TestSize.Level0) +{ + const size_t length = 100; + void* buffer = nullptr; + GetBuffer(buffer, length); + + std::vector inputs; + std::vector outputs; + InitTensor(inputs, buffer, length); + InitTensor(outputs, nullptr, 0); + + std::vector> outputsDims {}; + std::vector isOutputBufferEnough {}; + + OHOS::sptr sp = + OHOS::sptr(new (std::nothrow) V2_1::MockIPreparedModel()); + EXPECT_NE(sp, nullptr); + + std::unique_ptr preparedModel = std::make_unique(sp); + + OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough); + EXPECT_EQ(OH_NN_INVALID_PARAMETER, result); + const auto& memoryManager = MemoryManager::GetInstance(); + memoryManager->UnMapMemory(buffer); +} +} // namespace UnitTest +} // namespace NeuralNetworkRuntime +} // namespace OHOS -- Gitee From e6daaf2fba896d48b4a0cc00daf39e36e218cf0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=91=A8=E7=BF=94?= Date: Fri, 14 Jun 2024 10:48:56 +0800 Subject: [PATCH 2/4] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E6=96=B0=E5=A2=9E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: 周翔 --- test/unittest/components/BUILD.gn | 2 +- .../components/nn_backend/nn_backend_test.cpp | 166 ------------------ .../nn_compiled_cache_test.cpp | 123 ------------- .../nn_compiler/nn_compiler_test.cpp | 11 -- 4 files changed, 1 insertion(+), 301 deletions(-) diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn index 8668c06..4a85d8d 100644 --- a/test/unittest/components/BUILD.gn +++ b/test/unittest/components/BUILD.gn @@ -759,7 +759,6 @@ group("components_unittest") { ":InnerModelV1_0Test", ":InnerModelV2_0Test", ":MemoryManagerTest", - ":QuantParamsTest", ":NNBackendTest", ":NNCompiledCacheTest", ":NNCompilerTest", @@ -771,6 +770,7 @@ group("components_unittest") { ":NnValidationV2_0Test", ":OpsRegistryV1_0Test", ":OpsRegistryV2_0Test", + ":QuantParamsTest", ":TransformV1_0Test", ":TransformV2_0Test", ] diff --git a/test/unittest/components/nn_backend/nn_backend_test.cpp b/test/unittest/components/nn_backend/nn_backend_test.cpp index b280303..37ba923 100644 --- a/test/unittest/components/nn_backend/nn_backend_test.cpp +++ b/test/unittest/components/nn_backend/nn_backend_test.cpp @@ -91,7 +91,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_construct_001, TestSize.Level0) HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_001, TestSize.Level0) { size_t backendID = 1; - // std::shared_ptr device = std::make_shared(); std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); std::string backendName = "mock"; EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); @@ -108,8 +107,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_002, TestSize.Level0) std::shared_ptr device = std::make_shared(); std::string backendName = "mock"; - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) - // .WillRepeatedly(::testing::Return(OH_NN_SUCCESS)); EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); @@ -118,24 +115,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_002, TestSize.Level0) EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); } -// /** -// * @tc.name: nnbackendtest_getbackendname_003 -// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
-// * @tc.type: FUNC -// */ -// HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_003, TestSize.Level0) -// { -// size_t backendID = 1; -// std::shared_ptr device = std::make_shared(); - -// std::string deviceName = "mock"; -// EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) -// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); - -// std::unique_ptr hdiDevice = std::make_unique(device, backendID); -// EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(deviceName)); -// } - /** * @tc.name: nnbackendtest_getbackendname_005 * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. @@ -146,7 +125,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_005, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::string deviceName; std::string backendName = "mock"; EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); @@ -158,28 +136,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_005, TestSize.Level0) EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); } -// /** -// * @tc.name: nnbackendtest_getbackendname_006 -// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. -// * @tc.type: FUNC -// */ -// HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_006, TestSize.Level0) -// { -// size_t backendID = 1; -// std::shared_ptr device = std::make_shared(); - -// std::string deviceName; -// std::string backendName = "mock"; -// EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) -// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); - -// EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_)) -// .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(deviceName), ::testing::Return(OH_NN_SUCCESS))); - -// std::unique_ptr hdiDevice = std::make_unique(device, backendID); -// EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendName(backendName)); -// } - /** * @tc.name: nnbackendtest_getbackendname_007 * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
@@ -190,7 +146,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_007, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::string deviceName; std::string backendName = "mock"; EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); @@ -215,7 +170,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_008, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::string deviceName; std::string backendName = "mock"; EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_)) .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS))); @@ -238,11 +192,8 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendname_008, TestSize.Level0) HWTEST_F(NNBackendTest, nnbackendtest_getgackendtype_001, TestSize.Level0) { size_t backendID = 1; - // std::shared_ptr device = std::make_shared(); OH_NN_DeviceType backendName = OH_NN_OTHERS; - // EXPECT_CALL(*((MockIDevice *) device.get()), GetBackendType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendType(backendName)); @@ -292,11 +243,8 @@ HWTEST_F(NNBackendTest, nnbackendtest_getgackendtype_003, TestSize.Level0) HWTEST_F(NNBackendTest, nnbackendtest_getbackendstatus_001, TestSize.Level0) { size_t backendID = 1; - // std::shared_ptr device = std::make_shared(); DeviceStatus backendName = UNKNOWN; - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetBackendStatus(backendName)); @@ -346,12 +294,9 @@ HWTEST_F(NNBackendTest, nnbackendtest_getbackendstatus_003, TestSize.Level0) HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_001, TestSize.Level0) { size_t backendID = 1; - // std::shared_ptr device = std::make_shared(); Compilation backendName; Compilation* compilation = &backendName; - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_NE(nullptr, hdiDevice->CreateCompiler(compilation)); @@ -374,35 +319,11 @@ HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_002, TestSize.Level0) backendName.offlineModelBuffer.first = &b; backendName.offlineModelBuffer.second = static_cast(0); Compilation* compilation = &backendName; - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(nullptr, hdiDevice->CreateCompiler(compilation)); } -/** - * @tc.name: nnbackendtest_createcompiler_003 - * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
- * @tc.type: FUNC - */ -// HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_003, TestSize.Level0) -// { -// size_t backendID = 1; -// std::shared_ptr device = std::make_shared(); - -// Compilation backendName; -// char a = 'a'; -// backendName.nnModel = &a; -// Compilation* compilation = &backendName; - -// // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) -// // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); - -// std::unique_ptr hdiDevice = std::make_unique(device, backendID); -// EXPECT_EQ(nullptr, hdiDevice->CreateCompiler(compilation)); -// } - /** * @tc.name: nnbackendtest_destroycompiler_001 * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. @@ -411,13 +332,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_createcompiler_002, TestSize.Level0) HWTEST_F(NNBackendTest, nnbackendtest_destroycompiler_001, TestSize.Level0) { size_t backendID = 1; - // std::shared_ptr device = std::make_shared(); - - // Compilation backendName; - // Compiler* compilation = backendName.compiler; - - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyCompiler(nullptr)); @@ -433,14 +347,7 @@ HWTEST_F(NNBackendTest, nnbackendtest_destroycompiler_002, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // Compilation backendName; NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID); - // NNCompiler* nncompiler = &nncompiler; - - // Compiler* compilation = backendName.compiler; - - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->DestroyCompiler(nncompiler)); @@ -456,12 +363,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_001, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // Compilation backendName; - // Compiler* compilation = backendName.compiler; - - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); - std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(nullptr)); } @@ -478,9 +379,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_002, TestSize.Level0) Compilation backendName; Compilation* compilation = &backendName; - - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(compilation)); @@ -496,13 +394,7 @@ HWTEST_F(NNBackendTest, nnbackendtest_CreateExecutor_003, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // Compilation backendName; - // backendName.compiler - // Compilation* compilation = &backendName; Compilation *compilation = new (std::nothrow) Compilation(); - - // EXPECT_CALL(*((MockIDevice *) 
device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(nullptr, hdiDevice->CreateExecutor(compilation)); @@ -518,39 +410,10 @@ HWTEST_F(NNBackendTest, nnbackendtest_DestroyExecutor_001, TestSize.Level0) size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // Executor backendName; - // Executor* executor = &backendName; - // Compilation *compilation = new (std::nothrow) Compilation(); - - // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) - // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); - std::unique_ptr hdiDevice = std::make_unique(device, backendID); EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyExecutor(nullptr)); } -/** - * @tc.name: nnbackendtest_DestroyExecutor_002 - * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. - * @tc.type: FUNC - */ -// HWTEST_F(NNBackendTest, nnbackendtest_DestroyExecutor_002, TestSize.Level0) -// { -// size_t backendID = 1; -// std::shared_ptr device = std::make_shared(); - -// Compilation *compilationImpl = new (std::nothrow) Compilation(); -// BackendManager& backendManager = BackendManager::GetInstance(); -// std::shared_ptr backend = backendManager.GetBackend(compilationImpl->backendID); -// Executor* executorImpl = backend->CreateExecutor(compilationImpl); - -// // EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceType(::testing::_)) -// // .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_FAILED))); - -// std::unique_ptr hdiDevice = std::make_unique(device, backendID); -// EXPECT_NE(OH_NN_SUCCESS, hdiDevice->DestroyExecutor(executorImpl)); -// } - /** * @tc.name: nnbackendtest_createtensor_001 * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. @@ -595,27 +458,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_destroytensor_001, TestSize.Level0) EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->DestroyTensor(nullptr)); } -/** - * @tc.name: nnbackendtest_destroytensor_002 - * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. - * @tc.type: FUNC - */ -// HWTEST_F(NNBackendTest, nnbackendtest_destroytensor_002, TestSize.Level0) -// { -// size_t backendID = 1; -// std::shared_ptr device = std::make_shared(); - -// NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create(); -// BackendManager& backendManager = BackendManager::GetInstance(); -// size_t deviceID = 1; -// std::shared_ptr backend = backendManager.GetBackend(deviceID); -// TensorDesc* descImpl = reinterpret_cast(tensorDesc); -// Tensor* tensorImpl = backend->CreateTensor(descImpl); - -// std::unique_ptr hdiDevice = std::make_unique(device, backendID); -// EXPECT_EQ(OH_NN_SUCCESS, hdiDevice->DestroyTensor(tensorImpl)); -// } - /** * @tc.name: nnbackendtest_getdevice_001 * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
@@ -642,8 +484,6 @@ HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_001, TestSize.Level0 std::shared_ptr model = nullptr; std::vector ops; - // std::shared_ptr model = std::make_shared(); - // std::vector ops {true}; std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_EQ(OH_NN_INVALID_PARAMETER, hdiDevice->GetSupportedOperation(model, ops)); @@ -659,10 +499,8 @@ HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_002, TestSize.Level0 size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::shared_ptr model = nullptr; std::vector ops; std::shared_ptr model = std::make_shared(); - // std::vector ops {true}; std::unique_ptr hdiDevice = std::make_unique(nullptr, backendID); EXPECT_EQ(OH_NN_FAILED, hdiDevice->GetSupportedOperation(model, ops)); @@ -678,10 +516,8 @@ HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_003, TestSize.Level0 size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::shared_ptr model = nullptr; std::vector ops; std::shared_ptr model = std::make_shared(); - // std::vector ops {true}; EXPECT_CALL(*((MockIDevice *) device.get()), GetSupportedOperation(::testing::_, ::testing::_)) .WillRepeatedly(::testing::Return(OH_NN_FAILED)); @@ -700,10 +536,8 @@ HWTEST_F(NNBackendTest, nnbackendtest_getsupportedoperation_004, TestSize.Level0 size_t backendID = 1; std::shared_ptr device = std::make_shared(); - // std::shared_ptr model = nullptr; std::vector ops; std::shared_ptr model = std::make_shared(); - // std::vector ops {true}; EXPECT_CALL(*((MockIDevice *) device.get()), GetSupportedOperation(::testing::_, ::testing::_)) .WillRepeatedly(::testing::Return(OH_NN_SUCCESS)); diff --git a/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp b/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp index 0a97102..8450eb6 100644 --- a/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp +++ b/test/unittest/components/nn_compiled_cache/nn_compiled_cache_test.cpp @@ -87,129 +87,6 @@ HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_setmodelname_001, TestSize.Lev NNCompiledCache nncompiledCache; std::string m_modelName; } - -/** - * @tc.name: nncompiledcachetest_generatecachefiles_001 - * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. - * @tc.type: FUNC - */ -// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_generatecachefiles_001, TestSize.Level0) -// { -// NNCompiledCache nncompiledCache; -// std::vector caches; -// std::string m_cachePath = "a"; -// uint32_t m_cacheVersion = 1; - -// EXPECT_EQ(OH_NN_MEMORY_ERROR, nncompiledCache.GenerateCacheFiles(caches, m_cachePath, m_cacheVersion)); -// } - -// /** -// * @tc.name: nncompiledcachetest_generatecachemodel_001 -// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. -// * @tc.type: FUNC -// */ -// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_generatecachemodel_001, TestSize.Level0) -// { -// NNCompiledCache nncompiledCache; -// std::vector caches; -// const size_t cacheNumber = caches.size(); -// uint32_t cacheSize = 1; -// std::unique_ptr cacheInfo = CreateUniquePtr(cacheSize); -// std::string m_cachePath = "a"; -// uint32_t m_cacheVersion = 1; - -// EXPECT_EQ(OH_NN_MEOH_NN_INVALID_PARAMETERMORY_ERROR, nncompiledCache.GenerateCacheModel(caches, cacheInfo, m_cachePath, m_cacheVersion)); -// } - -// /** -// * @tc.name: nncompiledcachetest_writecacheinfo_001 -// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1. 
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_writecacheinfo_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     uint32_t infoCharNumber = cacheSize * sizeof(uint64_t);
-//     const size_t cacheNumber = caches.size();
-//     uint32_t cacheSize = 1;
-//     std::unique_ptr cacheInfo = CreateUniquePtr(cacheSize);
-//     std::string m_cachePath = "a";
-
-//     EXPECT_EQ(OH_NN_INVALID_PARAMETER, nncompiledCache.WriteCacheInfo(infoCharNumber, cacheInfo, m_cachePath));
-// }
-
-// /**
-// * @tc.name: nncompiledcachetest_checkcacheinfo_001
-// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1.
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_checkcacheinfo_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     NNCompiledCacheInfo cacheInfo;
-//     std::string cacheInfoPath = "cache_info.nncache";
-
-//     EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.CheckCacheInfo(cacheInfo, cacheInfoPath));
-// }
-
-// /**
-// * @tc.name: nncompiledcachetest_readcachemodelfile_001
-// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1.
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_readcachemodelfile_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     std::string cacheModelPath = ".nncache";
-//     OHOS::NeuralNetworkRuntime::Buffer modelBuffer;
-
-//     EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.ReadCacheModelFile(cacheModelPath, modelBuffer));
-// }
-
-// /**
-// * @tc.name: nncompiledcachetest_getcrc16_001
-// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1.
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_getcrc16_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     std::vector caches;
-//     std::string cacheModelPath = ".nncache";
-//     OHOS::NeuralNetworkRuntime::Buffer modelBuffer;
-
-//     EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.GetCrc16(static_cast(caches[i].data), caches[i].length));
-// }
-
-// /**
-// * @tc.name: nncompiledcachetest_getcachefilelength_001
-// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1.
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_getcachefilelength_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     std::string filePath = ".nncache";
-//     std::ifstream ifs(filePath.c_str(), std::ios::in | std::ios::binary);
-//     int fsize{-1};
-
-//     EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.GetCacheFileLength(ifs, fsize));
-// }
-
-// /**
-// * @tc.name: nncompiledcachetest_verifycachepath_001
-// * @tc.desc: Verify the QuantParams function return nullptr in case of fd -1.
-// * @tc.type: FUNC
-// */
-// HWTEST_F(NNCompiledCacheTest, nncompiledcachetest_verifycachepath_001, TestSize.Level0)
-// {
-//     NNCompiledCache nncompiledCache;
-//     char path[PATH_MAX];
-
-//     EXPECT_EQ(OH_NN_INVALID_FILE, nncompiledCache.VerifyCachePath(path));
-// }
-
-
 } // namespace UnitTest
 } // namespace NeuralNetworkRuntime
 } // namespace OHOS
\ No newline at end of file
diff --git a/test/unittest/components/nn_compiler/nn_compiler_test.cpp b/test/unittest/components/nn_compiler/nn_compiler_test.cpp
index 934fa26..d970815 100644
--- a/test/unittest/components/nn_compiler/nn_compiler_test.cpp
+++ b/test/unittest/components/nn_compiler/nn_compiler_test.cpp
@@ -81,20 +81,9 @@ HWTEST_F(NNCompilerTest, nncompilertest_construct_001, TestSize.Level0)
     size_t backendID = 1;
     std::shared_ptr device = std::make_shared();
 
-    // Compilation backendName;
     NNCompiler* nncompiler = new (std::nothrow) NNCompiler(device, backendID);
     EXPECT_NE(nullptr, nncompiler);
-
-    // NNCompiler* nncompiler_o;
-    // EXPECT_EQ(nullptr, nncompiler_o);
-
-    // Compilation backendName;
-    // Compilation* compilation = &backendName;
-    // NNCompiler* nncompiler_t = new (std::nothrow) NNCompiler(compilation->nnModel, device, backendID);
-    // EXPECT_EQ(nullptr, nncompiler_t);
 }
-
-
 } // namespace UnitTest
 } // namespace NeuralNetworkRuntime
 } // namespace OHOS
\ No newline at end of file
--
Gitee

From 51b434fd891c7da3bdf0ee16f9427e8a79f355ae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=91=A8=E7=BF=94?=
Date: Mon, 17 Jun 2024 17:30:58 +0800
Subject: [PATCH 3/4] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E6=96=B0=E5=A2=9E?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: 周翔
---
 test/unittest/components/BUILD.gn | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn
index 4a85d8d..87c333e 100644
--- a/test/unittest/components/BUILD.gn
+++ b/test/unittest/components/BUILD.gn
@@ -208,7 +208,6 @@ ohos_unittest("QuantParamsTest") {
     "drivers_interface_nnrt:libnnrt_proxy_1.0",
     "hilog:libhilog",
     "hitrace:libhitracechain",
-    "mindspore:mindir",
   ]
 }
 
@@ -229,7 +228,6 @@ ohos_unittest("NNBackendTest") {
     "drivers_interface_nnrt:libnnrt_proxy_1.0",
     "hilog:libhilog",
    "hitrace:libhitracechain",
-    "mindspore:mindir",
   ]
 }
 
@@ -250,7+248,6 @@ ohos_unittest("NNCompiledCacheTest") {
     "drivers_interface_nnrt:libnnrt_proxy_1.0",
     "hilog:libhilog",
     "hitrace:libhitracechain",
-    "mindspore:mindir",
   ]
 }
 
@@ -271,7 +268,6 @@ ohos_unittest("NNCompilerTest") {
     "drivers_interface_nnrt:libnnrt_proxy_1.0",
     "hilog:libhilog",
     "hitrace:libhitracechain",
-    "mindspore:mindir",
   ]
 }
--
Gitee

From 4e4b650d7e668d01340c539bf7e6c735ec3fb972 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=91=A8=E7=BF=94?=
Date: Mon, 17 Jun 2024 18:55:17 +0800
Subject: [PATCH 4/4] =?UTF-8?q?UT=E7=94=A8=E4=BE=8B=E6=96=B0=E5=A2=9E?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: 周翔
---
 test/unittest/components/BUILD.gn | 1 -
 1 file changed, 1 deletion(-)

diff --git a/test/unittest/components/BUILD.gn b/test/unittest/components/BUILD.gn
index 87c333e..3f447d5 100644
--- a/test/unittest/components/BUILD.gn
+++ b/test/unittest/components/BUILD.gn
@@ -595,7 +595,6 @@ ohos_unittest("HDIPreparedModelV2_1Test") {
     "drivers_interface_nnrt:libnnrt_proxy_2.0",
     "hilog:libhilog",
     "hitrace:libhitracechain",
-    "mindspore:mindir",
   ]
 }
--
Gitee
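
Note on the test pattern kept by this series: the retained cases all stub the device through GoogleMock, pinning outputs and the return code with EXPECT_CALL(...).WillRepeatedly(...), then asserting on the error code reported by the component under test. A minimal, self-contained sketch of that pattern follows; FakeDevice, MockFakeDevice and the integer error codes are illustrative stand-ins, not the actual NNRT/HDI interfaces touched by these patches.

#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <vector>

// Hypothetical device interface standing in for a HDI device proxy.
class FakeDevice {
public:
    virtual ~FakeDevice() = default;
    virtual int GetSupportedOperation(int modelId, std::vector<bool>& ops) = 0;
};

class MockFakeDevice : public FakeDevice {
public:
    MOCK_METHOD(int, GetSupportedOperation, (int, std::vector<bool>&), (override));
};

TEST(MockDevicePatternTest, StubsOutputAndReturnCode)
{
    MockFakeDevice device;

    // Fill the output argument and force a failure code, mirroring the
    // SetArgReferee/Return combination used by the unit tests above.
    std::vector<bool> supported {true, false};
    EXPECT_CALL(device, GetSupportedOperation(::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<1>(supported),
                                         ::testing::Return(-1)));

    std::vector<bool> ops;
    EXPECT_EQ(-1, device.GetSupportedOperation(1, ops));
    EXPECT_EQ(supported, ops);
}

Using WillRepeatedly rather than WillOnce keeps the stub valid no matter how many times the code under test queries the mocked device.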