diff --git a/config.gni b/config.gni index e15a060dce3adf9e4f10336992a6c56eee3bb890..c0961b7df808e88edf638f64a7a3d05c490a66cd 100644 --- a/config.gni +++ b/config.gni @@ -68,8 +68,8 @@ VIDEO_AISR_DIR = "$ALGORITHM_DIR/extensions/detail_enhancer CONTRAST_ENHANCER_DIR = "$ALGORITHM_DIR/contrast_enhancer" TEST_UTILS_PATH = "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/test/utils" -UNIT_TEST_OUTPUT_PATH = "video_processing_engine/unittest" -MODULE_TEST_OUTPUT_PATH = "video_processing_engine/moduletest" +UNIT_TEST_OUTPUT_PATH = "video_processing_engine/video_processing_engine/unittest" +MODULE_TEST_OUTPUT_PATH = "video_processing_engine/video_processing_engine/moduletest" VIDEO_PROCESSING_ENGINE_CFLAGS = [ "-std=c++17", diff --git a/framework/BUILD.gn b/framework/BUILD.gn index be44261a962c4a76e4bf2b45a408342ec242d16f..fda9fe165e76172ca6944e9fedda00e6a18f0b99 100644 --- a/framework/BUILD.gn +++ b/framework/BUILD.gn @@ -131,6 +131,20 @@ if (defined(global_parts_info) && part_name = "video_processing_engine" install_enable = true } + + ohos_prebuilt_shared_library("display_aipq_advanced_scale") { + if (is_asan && use_hwasan) { + source = "//binary/artifacts/display/AIPQ20/asan/libdisplay_aipq_advanced_scale.so" + } else { + source = "//binary/artifacts/display/AIPQ20/libdisplay_aipq_advanced_scale.so" + } + module_install_dir = "lib64/" + output = "libdisplay_aipq_advanced_scale.so" + install_images = [ "system" ] + subsystem_name = "multimedia" + part_name = "video_processing_engine" + install_enable = true + } } else { group("extream_vision_engine") { } @@ -140,6 +154,9 @@ if (defined(global_parts_info) && group("aihdr_engine") { } + + group("display_aipq_advanced_scale") { + } } ohos_shared_library("videoprocessingengine") { @@ -179,6 +196,10 @@ ohos_shared_library("videoprocessingengine") { "$ALGORITHM_DIR/common/algorithm_video_common.cpp", "$ALGORITHM_DIR/common/algorithm_video_impl.cpp", "$ALGORITHM_DIR/common/frame_info.cpp", + "$ALGORITHM_DIR/common/algorithm_environment.cpp", + "$ALGORITHM_DIR/common/image_openglsetup.cpp", + "$ALGORITHM_DIR/common/image_openclsetup.cpp", + "$ALGORITHM_DIR/common/image_opencl_wrapper.cpp", "$ALGORITHM_DIR/extension_manager/extension_manager.cpp", "$ALGORITHM_DIR/extension_manager/utils.cpp", "$COLORSPACE_CONVERTER_DIR/colorspace_converter_fwk.cpp", @@ -199,8 +220,6 @@ ohos_shared_library("videoprocessingengine") { "$VIDEO_PROCESSING_ENGINE_ROOT_DIR/services/utils/surface_buffer_info.cpp", "${target_gen_dir}/../services/video_processing_service_manager_proxy.cpp", "${target_gen_dir}/../services/video_processing_service_manager_stub.cpp", - "$ALGORITHM_COMMON_DIR/image_opencl_wrapper.cpp", - "$ALGORITHM_COMMON_DIR/image_openclsetup.cpp" ] deps = [ @@ -220,6 +239,7 @@ ohos_shared_library("videoprocessingengine") { external_deps = [ "c_utils:utils", "drivers_interface_display:libdisplay_commontype_proxy_2.1", + "ffrt:libffrt", "graphic_2d:2d_graphics", "graphic_2d:EGL", "graphic_2d:GLESv3", @@ -339,6 +359,7 @@ ohos_shared_library("image_processing") { external_deps = [ "c_utils:utils", "drivers_interface_display:display_commontype_idl_headers", + "ffrt:libffrt", "graphic_surface:surface", "graphic_2d:2d_graphics", "hilog:libhilog", @@ -350,6 +371,8 @@ ohos_shared_library("image_processing") { "media_foundation:media_foundation", "safwk:system_ability_fwk", "samgr:samgr_proxy", + "opencl-headers:libcl", + "egl:libEGL", ] innerapi_tags = [ "ndk" ] @@ -420,6 +443,7 @@ ohos_shared_library("video_processing") { external_deps = [ "c_utils:utils", + "ffrt:libffrt", 
"graphic_surface:surface", "hilog:libhilog", "hitrace:hitrace_meter", @@ -431,6 +455,7 @@ ohos_shared_library("video_processing") { "ipc:ipc_single", "safwk:system_ability_fwk", "samgr:samgr_proxy", + "opencl-headers:libcl", ] innerapi_tags = [ "ndk" ] diff --git a/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp index aa024898964a2e0994efcc9e82d110d4d15e2b82..7b26c5997f775050d1bcddfbef92de0d4a1ec523 100644 --- a/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp +++ b/framework/algorithm/colorspace_converter/colorspace_converter_fwk.cpp @@ -23,6 +23,7 @@ #include "EGL/egl.h" #include "surface_buffer_info.h" #include "video_processing_client.h" +#include "algorithm_environment.h" #include namespace OHOS { @@ -30,43 +31,12 @@ namespace Media { namespace VideoProcessingEngine { ColorSpaceConverterFwk::ColorSpaceConverterFwk() { - OpenCLInit(); - OpenGLInit(); + AlgorithmEnvironment::Get().InitializeCl(); + context.clContext = AlgorithmEnvironment::Get().GetClContext(); + AlgorithmEnvironment::Get().InitializeGl(); + context.glDisplay = AlgorithmEnvironment::Get().GetGLContext()->display; Extension::ExtensionManager::GetInstance().IncreaseInstance(); -} - -void ColorSpaceConverterFwk::OpenCLInit() -{ - void *OpenclFoundationHandle = nullptr; - std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; - auto ret = access(path.c_str(), F_OK); - if (ret != 0) { - VPE_LOGW("access = %d path = %s", ret, path.c_str()); - } else { - constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length - char deviceName[DEVICE_NAME_LENGTH]; - auto status = SetupOpencl(&OpenclFoundationHandle, "HUA", deviceName); - if (status != static_cast(CL_SUCCESS)) { - VPE_LOGE("%{public}s, Error: setupOpencl status=%{public}d\n", __FUNCTION__, status); - } - OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); - VPE_LOGI("VPE Framework connect and load SA!"); - OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); - } - context.clContext = reinterpret_cast(OpenclFoundationHandle); -} - -void ColorSpaceConverterFwk::OpenGLInit() -{ - context.glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY); - if (context.glDisplay == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("ColorSpaceConverterFwk Get display failed!"); - } - EGLint major; - EGLint minor; - if (eglInitialize(context.glDisplay, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("ColorSpaceConverterFwk eglInitialize failed!"); - } + eglInitializedFwk_ = true; } ColorSpaceConverterFwk::ColorSpaceConverterFwk(std::shared_ptr openglContext, @@ -96,7 +66,11 @@ ColorSpaceConverterFwk::~ColorSpaceConverterFwk() } } impls_.clear(); - CleanOpencl(context.clContext); + if (eglInitializedFwk_) { + AlgorithmEnvironment::Get().DeinitializeCl(); + AlgorithmEnvironment::Get().DeinitializeGl(); + eglInitializedFwk_ = false; + } Extension::ExtensionManager::GetInstance().DecreaseInstance(); } diff --git a/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h b/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h index 39885e38997832095fddc7bb36892779bdf5c142..618e613f1a8a49f724e570e08d4b9ae2dc0f27f3 100644 --- a/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h +++ b/framework/algorithm/colorspace_converter/include/colorspace_converter_fwk.h @@ -56,12 +56,11 @@ public: private: 
VPEAlgoErrCode Init(const sptr &input, const sptr &output, VPEContext context); - void OpenGLInit(); - void OpenCLInit(); std::shared_ptr impl_ { nullptr }; std::optional parameter_ { std::nullopt }; std::atomic initialized_ { false }; + std::atomic eglInitializedFwk_ { false }; Extension::ExtensionInfo extensionInfo_; std::map< std::tuple, diff --git a/framework/algorithm/common/algorithm_common.cpp b/framework/algorithm/common/algorithm_common.cpp index c9711ddd8e235fd9174c4206749e6b297c7c540e..d02670c6f8a821eccef91e0ea22b35fa5f61c1bf 100644 --- a/framework/algorithm/common/algorithm_common.cpp +++ b/framework/algorithm/common/algorithm_common.cpp @@ -22,24 +22,6 @@ namespace OHOS { namespace Media { namespace VideoProcessingEngine { -int SetupOpengl(std::shared_ptr &openglHandle) -{ - std::shared_ptr openglContextPtr = std::make_shared(); - openglContextPtr->display = eglGetDisplay(EGL_DEFAULT_DISPLAY); - if (openglContextPtr->display == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("Get display failed!"); - return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; - } - EGLint major; - EGLint minor; - if (eglInitialize(openglContextPtr->display, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("eglInitialize failed!"); - return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; - } - openglHandle = openglContextPtr; - return static_cast(VPE_ALGO_ERR_OK); -} - VPEAlgoErrCode ColorSpaceDescription::Create(const sptr &buffer, ColorSpaceDescription &desc) { CHECK_AND_RETURN_RET_LOG(nullptr != buffer, VPE_ALGO_ERR_INVALID_VAL, "Get an invalid buffer"); diff --git a/framework/algorithm/common/algorithm_environment.cpp b/framework/algorithm/common/algorithm_environment.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d5f9b83df35f0cca8c3be1a074796a171876a073 --- /dev/null +++ b/framework/algorithm/common/algorithm_environment.cpp @@ -0,0 +1,207 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "algorithm_environment.h" +#include "vpe_log.h" + +using namespace OHOS::Media::VideoProcessingEngine; + +AlgorithmEnvironment& AlgorithmEnvironment::Get() +{ + static AlgorithmEnvironment instance{}; + return instance; +} + +// OpenCL environment init +VPEAlgoErrCode AlgorithmEnvironment::InitializeCl() +{ + std::lock_guard lock(lock_); + return InitializeClLocked(); +} + +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeCl() +{ + std::lock_guard lock(lock_); + return DeinitializeClLocked(); +} + +VPEAlgoErrCode AlgorithmEnvironment::InitializeClLocked() +{ + if (referenceCountCl_ > 0) [[likely]] { + VPE_LOGD("already init(referenceCountCl_:%{public}d)", referenceCountCl_); + referenceCountCl_++; + return VPE_ALGO_ERR_OK; + } + VPE_LOGD("InitializeClLocked start to initialize..."); + VPEAlgoErrCode result = InitializeClEnvLocked(); + VPE_LOGD("InitializeClEnvLocked ret:%{public}d", result); + if (result == VPE_ALGO_ERR_OK) [[likely]] { + referenceCountCl_++; + } + return result; +} + +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeClLocked() +{ + if (referenceCountCl_ > 1) [[likely]] { + VPE_LOGD("environment is still in use(referenceCountCl_:%{public}d)", referenceCountCl_); + referenceCountCl_--; + return VPE_ALGO_ERR_OK; + } + VPE_LOGD("start to DeinitializeClLocked..."); + VPEAlgoErrCode result = DeinitializeClEnvLocked(); + VPE_LOGD("DeinitializeClEnvLocked ret:%{public}d", result); + if (result == VPE_ALGO_ERR_OK) [[likely]] { + if (referenceCountCl_ <= 0) { + VPE_LOGE("referenceCountCl_ is less than 1, no need to deinitialize"); + return VPE_ALGO_ERR_INVALID_OPERATION; + } + referenceCountCl_--; + } + return result; +} + +VPEAlgoErrCode AlgorithmEnvironment::OpenCLInit() +{ + void *OpenclFoundationHandle = nullptr; + std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; + auto ret = access(path.c_str(), F_OK); + if (ret != 0) { + VPE_LOGW("access = %d path = %s", ret, path.c_str()); + } else { + constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length + char deviceName[DEVICE_NAME_LENGTH]; + auto status = SetupOpencl(&OpenclFoundationHandle, "HUA", deviceName); + CHECK_AND_RETURN_RET_LOG(status == static_cast(CL_SUCCESS), + VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "GetOpenCLContext SetupOpencl fail!"); + } + openclContext_ = reinterpret_cast(OpenclFoundationHandle); + return VPE_ALGO_ERR_OK; +} +VPEAlgoErrCode AlgorithmEnvironment::InitializeClEnvLocked() +{ + return OpenCLInit(); +} + +void AlgorithmEnvironment::OpenCLDeinit() +{ + if (openclContext_ == nullptr) { + VPE_LOGI("OpenCLDeinit fail (openclContext_ null)!\n"); + return; + } + CleanOpencl(openclContext_); + openclContext_ = nullptr; +} +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeClEnvLocked() +{ + OpenCLDeinit(); + return VPE_ALGO_ERR_OK; +} + +ClContext* AlgorithmEnvironment::GetClContext() +{ + std::lock_guard lock(lock_); + VPEAlgoErrCode ret = InitializeClLocked(); + CHECK_AND_LOG(ret == VPE_ALGO_ERR_OK, + "AlgorithmEnvironment::GetClContext failed (InitializeClLocked ret:%{public}d !", ret); + return openclContext_; +} + +// OpenGL environment init +VPEAlgoErrCode AlgorithmEnvironment::InitializeGl() +{ + std::lock_guard lock(lock_); + return InitializeGlLocked(); +} + +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeGl() +{ + std::lock_guard lock(lock_); + return DeinitializeGlLocked(); +} + +VPEAlgoErrCode AlgorithmEnvironment::InitializeGlLocked() +{ + if (referenceCountGl_ > 0) [[likely]] { + VPE_LOGD("already init(referenceCountGl_:%{public}d)", 
referenceCountGl_); + referenceCountGl_++; + return VPE_ALGO_ERR_OK; + } + VPE_LOGD("start to InitializeGlLocked..."); + VPEAlgoErrCode result = InitializeGlEnvLocked(); + VPE_LOGD("InitializeGlEnvLocked ret:%{public}d", result); + if (result == VPE_ALGO_ERR_OK) [[likely]] { + referenceCountGl_++; + } + return result; +} + +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeGlLocked() +{ + if (referenceCountGl_ > 1) [[likely]] { + VPE_LOGD("environment is still in use(referenceCountGl_:%{public}d)", referenceCountGl_); + referenceCountGl_--; + return VPE_ALGO_ERR_OK; + } + VPE_LOGD("start to DeinitializeGlLocked..."); + VPEAlgoErrCode result = DeinitializeGlEnvLocked(); + VPE_LOGD("DeinitializeGlEnvLocked ret:%{public}d", result); + if (result == VPE_ALGO_ERR_OK) [[likely]] { + if (referenceCountGl_ <= 0) { + VPE_LOGE("referenceCountGl_ is less than 1, no need to deinitialize"); + return VPE_ALGO_ERR_INVALID_OPERATION; + } + referenceCountGl_--; + } + return result; +} + +VPEAlgoErrCode AlgorithmEnvironment::OpenGLInit() +{ + auto status = SetupOpengl(openglContext_); + CHECK_AND_RETURN_RET_LOG(status == static_cast(VPE_ALGO_ERR_OK), + VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED, + "OpenGLInit SetupOpengl fail!"); + return VPE_ALGO_ERR_OK; +} +VPEAlgoErrCode AlgorithmEnvironment::InitializeGlEnvLocked() +{ + return OpenGLInit(); +} + +void AlgorithmEnvironment::OpenGLDeinit() +{ + if (openglContext_ == nullptr) { + VPE_LOGI("OpenGLDeinit fail (openglContext_ null)!\n"); + return; + } + CleanOpenGL(openglContext_); + openglContext_ = nullptr; +} +VPEAlgoErrCode AlgorithmEnvironment::DeinitializeGlEnvLocked() +{ + OpenGLDeinit(); + return VPE_ALGO_ERR_OK; +} + +std::shared_ptr AlgorithmEnvironment::GetGLContext() +{ + std::lock_guard lock(lock_); + VPEAlgoErrCode ret = InitializeGlLocked(); + CHECK_AND_LOG(ret == VPE_ALGO_ERR_OK, + "AlgorithmEnvironment::GetGLContext failed (InitializeGlLocked ret:%{public}d !", ret); + return openglContext_; +} \ No newline at end of file diff --git a/framework/algorithm/common/algorithm_video_impl.cpp b/framework/algorithm/common/algorithm_video_impl.cpp index b4e2b5cc1fc6310c57b0b295a16cee68a4838ce0..90262fcad2175b7f3a7c1c3d4917ca0564fda711 100644 --- a/framework/algorithm/common/algorithm_video_impl.cpp +++ b/framework/algorithm/common/algorithm_video_impl.cpp @@ -30,6 +30,7 @@ using namespace std::chrono_literals; namespace { constexpr uint32_t WAIT_FOR_EVER = std::numeric_limits::max(); constexpr uint32_t BUFFER_QUEUE_SIZE = 5; +constexpr uint32_t AUTO_DISABLE_CHECK_COUNT = 5; std::string ToString(const sptr& buffer) { @@ -81,7 +82,7 @@ VPEAlgoErrCode VpeVideoImpl::SetOutputSurface(const sptr& surface) std::lock_guard producerLock(producerLock_); if (producer_ != nullptr) { if (producer_->GetUniqueId() == surface->GetUniqueId()) { - VPE_LOGD("Oops! The same surface(%{public}llu)!", surface->GetUniqueId()); + VPE_LOGD("Oops! 
The same surface(%" PRIu64 ")", surface->GetUniqueId()); return VPE_ALGO_ERR_OK; } producer_->UnRegisterReleaseListener(); @@ -90,12 +91,12 @@ VPEAlgoErrCode VpeVideoImpl::SetOutputSurface(const sptr& surface) surface->UnRegisterReleaseListener(); GSError err = surface->RegisterReleaseListener([this](sptr&) { return OnProducerBufferReleased(); }); CHECK_AND_RETURN_RET_LOG(err == GSERROR_OK, VPE_ALGO_ERR_UNKNOWN, "RegisterReleaseListener failed!"); - VPE_LOGI("Set output(%{public}llu) buffer queue size to %{public}u", surface->GetUniqueId(), BUFFER_QUEUE_SIZE); + VPE_LOGI("Set output(%" PRIu64 ") buffer queue size to %{public}u", surface->GetUniqueId(), BUFFER_QUEUE_SIZE); surface->SetQueueSize(BUFFER_QUEUE_SIZE); surface->Connect(); surface->CleanCache(); CHECK_AND_RETURN_RET_LOG(AttachAndRefreshProducerBuffers(surface), VPE_ALGO_ERR_UNKNOWN, - "Failed to attach buffers to new output surface(%{public}llu)!", surface->GetUniqueId());; + "Failed to attach buffers to new output surface(%" PRIu64 ")!", surface->GetUniqueId());; if (state_.load() != VPEState::IDLE) { cvTrigger_.notify_one(); } @@ -602,7 +603,7 @@ VPEAlgoErrCode VpeVideoImpl::RenderOutputBuffer(uint32_t index, int64_t renderTi std::lock_guard producerLock(producerLock_); CHECK_AND_RETURN_RET_LOG(producer_ != nullptr, VPE_ALGO_ERR_INVALID_OPERATION, "Output surface is null!"); auto ret = producer_->FlushBuffer(bufferInfo.buffer, -1, flushcfg); - VPE_LOGD("producer_(%{public}llu)->FlushBuffer({ %{public}s })=%{public}s flushBQ=%{public}zu", + VPE_LOGD("producer_(%" PRIu64 ")->FlushBuffer({ %{public}s })=%{public}s flushBQ=%{public}zu", producer_->GetUniqueId(), ToString(bufferInfo.buffer).c_str(), AlgorithmUtils::ToString(ret).c_str(), flushBufferQueue_.size() + 1); if (ret != GSERROR_OK) { @@ -658,7 +659,7 @@ bool VpeVideoImpl::RequestBuffer(SurfaceBufferInfo& bufferInfo, GSError& errorCo CHECK_AND_RETURN_RET_LOG(hasConsumer_.load(), false, "Input surface is null!"); errorCode = producer_->RequestBuffer(bufferInfo.buffer, bufferInfo.fence, requestCfg_); if (errorCode != GSERROR_OK || bufferInfo.buffer == nullptr) { - VPE_EX_LOGW(logInfos, "Failed to producer_(%{public}llu)->RequestBuffer(requestCfg={ %{public}s }), " + VPE_EX_LOGW(logInfos, "Failed to producer_(%" PRIu64 ")->RequestBuffer(requestCfg={ %{public}s }), " "ret:%{public}s", producer_->GetUniqueId(), ToString(requestCfg_).c_str(), AlgorithmUtils::ToString(errorCode).c_str()); return false; @@ -666,7 +667,7 @@ bool VpeVideoImpl::RequestBuffer(SurfaceBufferInfo& bufferInfo, GSError& errorCo producerBufferQueue_.push(bufferInfo); AddBufferToCache(bufferInfo); if (!isEnable_.load()) { - VPE_EX_LOGD(logInfos, "producer_(%{public}llu)->RequestBuffer({ %{public}s }) and try to release.", + VPE_EX_LOGD(logInfos, "producer_(%" PRIu64 ")->RequestBuffer({ %{public}s }) and try to release.", producer_->GetUniqueId(), ToString(bufferInfo.buffer).c_str()); auto it = attachBufferIDs_.find(bufferInfo.buffer->GetSeqNum()); if (it != attachBufferIDs_.end()) { @@ -680,7 +681,7 @@ bool VpeVideoImpl::RequestBuffer(SurfaceBufferInfo& bufferInfo, GSError& errorCo }, ADD_VPE_LOG_INFO(logInfos)); } } else { - VPE_EX_LOGD(logInfos, "producer_(%{public}llu)->RequestBuffer({ %{public}s })", producer_->GetUniqueId(), + VPE_EX_LOGD(logInfos, "producer_(%" PRIu64 ")->RequestBuffer({ %{public}s })", producer_->GetUniqueId(), ToString(bufferInfo.buffer).c_str()); if (attachBufferQueue_.empty()) { return true; @@ -723,9 +724,9 @@ bool VpeVideoImpl::AttachAndRefreshProducerBuffers(const sptr& producer 
     for (auto& [index, bufferInfo] : producerBufferCache_) {
         auto errorCode = producer->AttachBufferToQueue(bufferInfo.buffer);
         CHECK_AND_RETURN_RET_LOG(errorCode == GSERROR_OK, false,
-            "Failed to producer(%{public}llu)->AttachBufferToQueue({ %{public}s })=%{public}s", producer->GetUniqueId(),
+            "Failed to producer(%" PRIu64 ")->AttachBufferToQueue({ %{public}s })=%{public}s", producer->GetUniqueId(),
             ToString(bufferInfo.buffer).c_str(), AlgorithmUtils::ToString(errorCode).c_str());
-        VPE_LOGD("producer(%{public}llu)->AttachBufferToQueue({ %{public}s })", producer->GetUniqueId(),
+        VPE_LOGD("producer(%" PRIu64 ")->AttachBufferToQueue({ %{public}s })", producer->GetUniqueId(),
             ToString(bufferInfo.buffer).c_str());
     }
     std::set producerBufferIDs{};
@@ -845,11 +846,7 @@ void VpeVideoImpl::ProcessBuffers()
             break;
         }
         if (isEnable_.load()) {
-            if (!ProcessBuffer(srcBufferInfo, dstBufferInfo) && IsDisableAfterProcessFail()) {
-                VPE_LOGD("Dsiable because failed to process !");
-                Disable();
-                continue;
-            }
+            ProcessBuffer(srcBufferInfo, dstBufferInfo);
         } else {
             BypassBuffer(srcBufferInfo, dstBufferInfo);
         }
@@ -884,7 +881,25 @@ bool VpeVideoImpl::GetConsumerAndProducerBuffer(SurfaceBufferInfo& srcBufferInfo
     return true;
 }
 
-bool VpeVideoImpl::ProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo)
+void VpeVideoImpl::ProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo)
+{
+    if (DoProcessBuffer(srcBufferInfo, dstBufferInfo)) {
+        processFailCount_ = 0;
+        return;
+    }
+    if (!IsDisableAfterProcessFail()) {
+        VPE_LOGD("Do NOT disable feature after failed to process.");
+        return;
+    }
+    if (processFailCount_++ < AUTO_DISABLE_CHECK_COUNT) {
+        VPE_LOGD("processFailCount_:%{public}d", processFailCount_);
+        return;
+    }
+    VPE_LOGD("Disable because failed to process!");
+    Disable();
+}
+
+bool VpeVideoImpl::DoProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo)
 {
     dstBufferInfo.timestamp = srcBufferInfo.timestamp;
     auto errorCode = Process(srcBufferInfo.buffer, dstBufferInfo.buffer);
@@ -917,7 +932,7 @@ void VpeVideoImpl::BypassBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferI
         ret1 = producer_->DetachBufferFromQueue(dstBufferInfo.buffer);
         ret2 = producer_->AttachBufferToQueue(srcBufferInfo.buffer);
         SetRequestCfgLocked(srcBufferInfo.buffer);
-        VPE_LOGD("producer_(%{public}llu)->DetachBufferFromQueue({ %{public}s })=%{public}s, "
+        VPE_LOGD("producer_(%" PRIu64 ")->DetachBufferFromQueue({ %{public}s })=%{public}s, "
             "AttachBufferToQueue({ %{public}s })=%{public}s requestCfg:{ %{public}s } ",
             producer_->GetUniqueId(), ToString(dstBufferInfo.buffer).c_str(), AlgorithmUtils::ToString(ret1).c_str(),
             ToString(srcBufferInfo.buffer).c_str(), AlgorithmUtils::ToString(ret2).c_str(),
@@ -1153,6 +1168,10 @@ VPEAlgoErrCode VpeVideoImpl::ExecuteWithCheck(std::function&& checke
 
 void VpeVideoImpl::ConsumerListener::OnBufferAvailable()
 {
+    if (owner_.expired()) {
+        VPE_LOGE("Video processing is invalid!");
+        return;
+    }
     std::shared_ptr owner = owner_.lock();
     if (owner == nullptr) {
         VPE_LOGE("Video processing is null!");
diff --git a/framework/algorithm/common/image_openglsetup.cpp b/framework/algorithm/common/image_openglsetup.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..a74fcd3c511da1996efcafa0eb334aca29aaaf01
--- /dev/null
+++ b/framework/algorithm/common/image_openglsetup.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "image_openglsetup.h" +#include +#include +#include "securec.h" +#include "vpe_log.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +int SetupOpengl(std::shared_ptr &openglHandle) +{ + std::shared_ptr openglContextPtr = std::make_shared(); + openglContextPtr->display = eglGetDisplay(EGL_DEFAULT_DISPLAY); + if (openglContextPtr->display == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("Get display failed!"); + return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; + } + EGLint major; + EGLint minor; + if (eglInitialize(openglContextPtr->display, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { + VPE_LOGE("eglInitialize failed!"); + return VPE_ALGO_ERR_OPERATION_NOT_SUPPORTED; + } + openglHandle = openglContextPtr; + return static_cast(VPE_ALGO_ERR_OK); +} +void CleanOpenGL(std::shared_ptr &openglHandle) +{ + if ((openglHandle != nullptr) && (openglHandle->display != EGL_NO_DISPLAY)) { + eglTerminate(openglHandle->display); + } +} +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/framework/algorithm/common/include/algorithm_environment.h b/framework/algorithm/common/include/algorithm_environment.h new file mode 100644 index 0000000000000000000000000000000000000000..d12b0a704e9162f83334736809d727a705f30e81 --- /dev/null +++ b/framework/algorithm/common/include/algorithm_environment.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2024 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ALGORITHM_ENVIRONMENT_H +#define ALGORITHM_ENVIRONMENT_H + +#include +#include + +#include "image_openclsetup.h" +#include "image_openglsetup.h" +#include "algorithm_errors.h" +#include "utils.h" + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class AlgorithmEnvironment { +public: + static AlgorithmEnvironment& Get(); + + VPEAlgoErrCode InitializeCl(); + VPEAlgoErrCode DeinitializeCl(); + + VPEAlgoErrCode InitializeGl(); + VPEAlgoErrCode DeinitializeGl(); + + ClContext* GetClContext(); + std::shared_ptr GetGLContext(); + +private: + AlgorithmEnvironment() = default; + virtual ~AlgorithmEnvironment() = default; + AlgorithmEnvironment(const AlgorithmEnvironment&) = delete; + AlgorithmEnvironment& operator=(const AlgorithmEnvironment&) = delete; + AlgorithmEnvironment(AlgorithmEnvironment&&) = delete; + AlgorithmEnvironment& operator=(AlgorithmEnvironment&&) = delete; + + VPEAlgoErrCode InitializeClLocked(); + VPEAlgoErrCode DeinitializeClLocked(); + VPEAlgoErrCode InitializeClEnvLocked(); + VPEAlgoErrCode DeinitializeClEnvLocked(); + VPEAlgoErrCode OpenCLInit(); + void OpenCLDeinit(); + + VPEAlgoErrCode InitializeGlLocked(); + VPEAlgoErrCode DeinitializeGlLocked(); + VPEAlgoErrCode InitializeGlEnvLocked(); + VPEAlgoErrCode DeinitializeGlEnvLocked(); + VPEAlgoErrCode OpenGLInit(); + void OpenGLDeinit(); + + std::mutex lock_{}; + // Guarded by lock_ begin + uint32_t referenceCountCl_{}; + uint32_t referenceCountGl_{}; + // Guarded by lock_ end + + ClContext *openclContext_{}; + std::shared_ptr openglContext_{}; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // ALGORITHM_ENVIRONMENT_H diff --git a/framework/algorithm/common/include/algorithm_video_impl.h b/framework/algorithm/common/include/algorithm_video_impl.h index 4704a9d2ff1fc680417b73f2c1520b839022cef5..775eff2f41c0ffed8645c73a0ff2d8357773d633 100644 --- a/framework/algorithm/common/include/algorithm_video_impl.h +++ b/framework/algorithm/common/include/algorithm_video_impl.h @@ -137,7 +137,8 @@ private: void CheckAndUpdateProducerCache(); void ProcessBuffers(); bool GetConsumerAndProducerBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); - bool ProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); + void ProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); + bool DoProcessBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); void BypassBuffer(SurfaceBufferInfo& srcBufferInfo, SurfaceBufferInfo& dstBufferInfo); void OutputBuffer(const SurfaceBufferInfo& bufferInfo, const SurfaceBufferInfo& bufferImage, std::function&& getReadyToRender, const LogInfo& logInfo); @@ -204,6 +205,7 @@ private: // Guarded by taskLock_ begin std::atomic isProcessing_{false}; // Guarded by taskLock_ end + uint32_t processFailCount_{}; mutable std::mutex consumerBufferLock_{}; // Guarded by consumerBufferLock_ begin diff --git a/framework/algorithm/common/include/hdr_vivid_metadata_v1.h b/framework/algorithm/common/include/hdr_vivid_metadata_v1.h index 3a8c16408810733114e2233d03bc7a646988556a..1e29a4ed6314e44eebed7ea5e48470ea4160ffc7 100644 --- a/framework/algorithm/common/include/hdr_vivid_metadata_v1.h +++ b/framework/algorithm/common/include/hdr_vivid_metadata_v1.h @@ -64,6 +64,7 @@ struct HdrVividMetadataV1 { unsigned int colorSaturationNum; // 对应标准中color_saturation_enable_num unsigned int colorSaturationGain[16]; // 对应标准中color_saturation_enable_gain std::vector 
gtmLut; // stores the LUT generated by aihdr
+    int videoType; // set by Camera to indicate whether the video source is a photo preview or LivePhoto; 1 means yes
 };
 // C-Link parameter
 typedef struct HwDisplayMeta {
diff --git a/framework/algorithm/common/include/image_openclsetup.h b/framework/algorithm/common/include/image_openclsetup.h
index f639d706db3cdbba5ff8228e5180dd1f6e490fe0..1b351ebec53113739d0b79a9c47dc5ad989c734f 100644
--- a/framework/algorithm/common/include/image_openclsetup.h
+++ b/framework/algorithm/common/include/image_openclsetup.h
@@ -39,7 +39,6 @@
 };
 
 #define INFO_BUFFER_LENGTH 128
-extern "C" {
 namespace OHOS {
 namespace Media {
 namespace VideoProcessingEngine {
@@ -48,6 +47,5 @@ void CleanOpencl(ClContext *pCtx);
 } // namespace VideoProcessingEngine
 } // namespace Media
 } // namespace OHOS
-}
 
 #endif // IMAGE_OPENCL_SETUP_H
diff --git a/framework/algorithm/common/include/image_openglsetup.h b/framework/algorithm/common/include/image_openglsetup.h
new file mode 100644
index 0000000000000000000000000000000000000000..37217bf3528f8e97a31b350b3f66b75eb40103e9
--- /dev/null
+++ b/framework/algorithm/common/include/image_openglsetup.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef IMAGE_OPENGL_SETUP_H
+#define IMAGE_OPENGL_SETUP_H
+
+#include 
+#include 
+#include 
+#include "vpe_context.h"
+
+namespace OHOS {
+namespace Media {
+namespace VideoProcessingEngine {
+int SetupOpengl(std::shared_ptr &openglHandle);
+void CleanOpenGL(std::shared_ptr &openglHandle);
+} // namespace VideoProcessingEngine
+} // namespace Media
+} // namespace OHOS
+
+#endif // IMAGE_OPENGL_SETUP_H
diff --git a/framework/algorithm/common/include/vpe_model_path.h b/framework/algorithm/common/include/vpe_model_path.h
index 4fbea901ed18a6d5899f3a05b0a517d073eef9ef..b4226ad648431a6c9dc0adb45e33fb291ce81737 100644
--- a/framework/algorithm/common/include/vpe_model_path.h
+++ b/framework/algorithm/common/include/vpe_model_path.h
@@ -82,6 +82,7 @@ enum VpeModelKey {
     VIDEO_AIHDR_ALGO_CONFIG,
     VIDEO_AIHDR_MODEL,
     IMAGE_SR_10bit_CONVERT_KERNEL,
+    ESR_DCT_CL_KERNEL,
     VPE_MODEL_KEY_NUM,
 };
 
@@ -147,6 +148,7 @@ const std::array VPE_MODEL_PATHS = {
     "/sys_prod/etc/VideoProcessingEngine/video_aihdr_algo_config.xml",
     "/sys_prod/etc/VideoProcessingEngine/GTM_AIHDR.omc",
     "/sys_prod/etc/VideoProcessingEngine/10bitAisr.bin",
+    "/sys_prod/etc/VideoProcessingEngine/ESR_DCT_Kernel.bin",
 };
 } // namespace VideoProcessingEngine
 } // namespace Media
diff --git a/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp b/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp
index 8fcbafadefe935c55d333bcc1d7d0731aa7fbc1e..5f7f1adceb85f540098af54cabff50ddbb22d280 100644
--- a/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp
+++ b/framework/algorithm/detail_enhancer/detail_enhancer_image_fwk.cpp
@@ -17,6 +17,7 @@
 
 #include 
 
+#include "algorithm_utils.h"
 #include "detail_enhancer_common.h"
 #include "extension_manager.h"
 #include "native_buffer.h"
@@ -52,6 +53,8 @@ const
std::unordered_set SUPPORTED_FORMATS = { OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P, // YU12 OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_P, // YV12 OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, // RGBA_1010102 + OHOS::GRAPHIC_PIXEL_FMT_YCBCR_P010, // YCBCR420 semi-planar 10bit packed format + OHOS::GRAPHIC_PIXEL_FMT_YCRCB_P010, // YCRCB420 semi-planar 10bit packed format }; const std::vector> SUPER_LEVEL_TARGET_RECT = { {1, 1104, 1, 848}, @@ -79,11 +82,15 @@ DetailEnhancerImageFwk::DetailEnhancerImageFwk(int type) { type_ = (type >= IMAGE && type <= VIDEO) ? type : IMAGE; Extension::ExtensionManager::GetInstance().IncreaseInstance(); + VideoProcessingManager::GetInstance().Connect(); + RegisterServerListener(); } DetailEnhancerImageFwk::~DetailEnhancerImageFwk() { + UnregisterServerListener(); Clear(); + VideoProcessingManager::GetInstance().Disconnect(); Extension::ExtensionManager::GetInstance().DecreaseInstance(); } @@ -95,9 +102,37 @@ std::shared_ptr DetailEnhancerImage::Create(int type) return impl; } +bool DetailEnhancerImageFwk::RegisterServerListener() +{ + if (serverListener_ != nullptr) { + return true; + } + auto serverListener = std::make_shared(*this); + CHECK_AND_RETURN_RET_LOG(serverListener != nullptr, false, "Failed to create ServerListener!"); + VPEAlgoErrCode err = VideoProcessingManager::GetInstance().RegisterServerListener(serverListener); + CHECK_AND_RETURN_RET_LOG(err == VPE_ALGO_ERR_OK, false, "Failed to register server listener, return %{public}s!", + AlgorithmUtils::ToString(err).c_str()); + serverListener_ = serverListener; + VPE_LOGD("success."); + return true; +} + +bool DetailEnhancerImageFwk::UnregisterServerListener() +{ + if (serverListener_ == nullptr) { + return true; + } + VPEAlgoErrCode err = VideoProcessingManager::GetInstance().UnregisterServerListener(serverListener_); + serverListener_ = nullptr; + CHECK_AND_RETURN_RET_LOG(err == VPE_ALGO_ERR_OK, false, "Failed to unregister server listener, return %{public}s!", + AlgorithmUtils::ToString(err).c_str()); + VPE_LOGD("success."); + return true; +} + std::shared_ptr DetailEnhancerImageFwk::GetAlgorithm(int level) { - if (level < DETAIL_ENH_LEVEL_NONE || level > DETAIL_ENH_LEVEL_VIDEO) { + if (level < DETAIL_ENH_LEVEL_NONE || level > DETAIL_ENH_LEVEL_HIGH_SLR) { VPE_LOGE("Invalid level:%{public}d", level); return nullptr; } @@ -178,6 +213,12 @@ int DetailEnhancerImageFwk::EvaluateTargetLevel(const sptr& input std::abs(static_cast(widthRatio * inputW + 0.5) - static_cast(heightRatio * inputW + 0.5)) <= 2 && // 0.5 means rounding, 2 means two pixels std::abs(static_cast(widthRatio * inputH + 0.5) - static_cast(heightRatio * inputH + 0.5)) <= 2) { + VPE_LOGI("Current format %{public}d", input->GetFormat()); + if (input->GetFormat() == OHOS::GRAPHIC_PIXEL_FMT_YCBCR_P010 || + input->GetFormat() == OHOS::GRAPHIC_PIXEL_FMT_YCRCB_P010) { + VPE_LOGI("Prioritize using slr algo when scaling down scenes"); + return DETAIL_ENH_LEVEL_HIGH_SLR; + } VPE_LOGI("Prioritize using extream vision algo when scaling down scenes"); return DETAIL_ENH_LEVEL_HIGH; } @@ -236,7 +277,7 @@ VPEAlgoErrCode DetailEnhancerImageFwk::DoProcess(const sptr& inpu float widthRatio = static_cast(output->GetWidth()) / static_cast(input->GetWidth()); float heightRatio = static_cast(output->GetHeight()) / static_cast(input->GetHeight()); int targetLevel = EvaluateTargetLevel(input, output, widthRatio, heightRatio); - if (targetLevel < DETAIL_ENH_LEVEL_HIGH_AISR && + if ((targetLevel < DETAIL_ENH_LEVEL_HIGH_AISR || targetLevel == DETAIL_ENH_LEVEL_HIGH_SLR) && 
std::fabs(widthRatio - 1.0f) < EPSILON && std::fabs(heightRatio - 1.0f) < EPSILON) { VPE_LOGI("The current scaling ratio is 1.0, and the algorithm is not AISR, so copy it directly."); return (memcpy_s(output->GetVirAddr(), output->GetSize(), input->GetVirAddr(), input->GetSize()) == EOK) ? @@ -247,6 +288,9 @@ VPEAlgoErrCode DetailEnhancerImageFwk::DoProcess(const sptr& inpu auto algoImpl = GetAlgorithm(level); if (algoImpl == nullptr) { VPE_LOGE("Get Algorithm impl for %{public}d failed!", level); + if (level == DETAIL_ENH_LEVEL_HIGH_SLR) { + level -= 3; // 3 level step to EVE + } continue; } parameter_.level = static_cast((level == DETAIL_ENH_LEVEL_HIGH_AISR) ? @@ -263,6 +307,9 @@ VPEAlgoErrCode DetailEnhancerImageFwk::DoProcess(const sptr& inpu VPE_LOGD("AISR processed failed, try to process by EVE"); } else if (level > DETAIL_ENH_LEVEL_NONE) { VPE_LOGW("Failed to process with level %{public}d", level); + } else if (level == DETAIL_ENH_LEVEL_HIGH_SLR) { + VPE_LOGW("Failed to process with level %{public}d", level); + level -= 3; // 3 level step to EVE } else { VPE_LOGE("Failed to process with detail enhancer"); return VPE_ALGO_ERR_UNKNOWN; @@ -328,6 +375,12 @@ VPEAlgoErrCode DetailEnhancerImageFwk::ProcessAlgorithm(const std::shared_ptr -#include "detail_enhancer_image.h" #include "detail_enhancer_base.h" +#include "detail_enhancer_image.h" +#include "ivideo_processing_server_listener.h" namespace OHOS { namespace Media { @@ -40,6 +41,23 @@ public: VPEAlgoErrCode ResetProtectionStatus() final; private: + class ServerListener : public IVideoProcessingServerListener { + public: + ServerListener(DetailEnhancerImageFwk& owner) : owner_(owner) {} + virtual ~ServerListener() = default; + ServerListener(const ServerListener&) = delete; + ServerListener& operator=(const ServerListener&) = delete; + ServerListener(ServerListener&&) = delete; + ServerListener& operator=(ServerListener&&) = delete; + + void OnServerDied() final; + + private: + DetailEnhancerImageFwk& owner_; + }; + + bool RegisterServerListener(); + bool UnregisterServerListener(); std::shared_ptr GetAlgorithm(int feature); std::shared_ptr CreateAlgorithm(int feature); bool IsValidProcessedObject(const sptr& input, const sptr& output); @@ -56,6 +74,7 @@ private: mutable std::mutex lock_{}; std::unordered_map> algorithms_{}; std::shared_ptr lastAlgorithm_{}; + std::shared_ptr serverListener_{}; int type_; std::atomic parameterUpdated{}; bool hasParameter_{}; diff --git a/framework/algorithm/extensions/skia/skia_impl.cpp b/framework/algorithm/extensions/skia/skia_impl.cpp index 090261100bb084149aac715d5c57455684991b25..4a2a7c91fdd8c1f5de991a5a5fc71b1547a12188 100644 --- a/framework/algorithm/extensions/skia/skia_impl.cpp +++ b/framework/algorithm/extensions/skia/skia_impl.cpp @@ -188,7 +188,6 @@ int CreateYUVPixmap(const sptr& buffer, std::array(buffer->GetStride()); } - return 0; } rowbyte[CHANNEL_Y] = planesInfo->planes[CHANNEL_Y].columnStride; diff --git a/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h b/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h index 7ed45db635a96bdaa68a87018dbdf4494c844aba..8d5786be670349baca9bf0672a080df8464df030 100644 --- a/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h +++ b/framework/algorithm/metadata_generator/include/metadata_generator_fwk.h @@ -37,11 +37,11 @@ public: private: VPEAlgoErrCode Init(const sptr &input); - void OpenGLInit(); std::shared_ptr impl_ { nullptr }; MetadataGeneratorParameter parameter_; std::atomic 
initialized_ { false }; + std::atomic eglInitializedFwk_ { false }; Extension::ExtensionInfo extensionInfo_; VPEContext context; }; diff --git a/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp b/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp index 3018ba05fcece4562dfb4e46152cbd6d6f0d869d..4bc098deb9af9afd84079d85057c60bf85298e86 100644 --- a/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp +++ b/framework/algorithm/metadata_generator/metadata_generator_fwk.cpp @@ -15,6 +15,7 @@ #include "metadata_generator_fwk.h" #include "video_processing_client.h" +#include "algorithm_environment.h" #include "extension_manager.h" #include "native_buffer.h" #include "surface_buffer.h" @@ -27,26 +28,15 @@ namespace Media { namespace VideoProcessingEngine { MetadataGeneratorFwk::MetadataGeneratorFwk() { - OpenGLInit(); + AlgorithmEnvironment::Get().InitializeGl(); + context.glDisplay = AlgorithmEnvironment::Get().GetGLContext()->display; + eglInitializedFwk_ = true; OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Connect(); VPE_LOGI("VPE Framework connect and load SA!"); OHOS::Media::VideoProcessingEngine::VideoProcessingManager::GetInstance().Disconnect(); Extension::ExtensionManager::GetInstance().IncreaseInstance(); } -void MetadataGeneratorFwk::OpenGLInit() -{ - context.glDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY); - if (context.glDisplay == EGL_NO_DISPLAY || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("MetadataGeneratorFwk Get display failed!"); - } - EGLint major; - EGLint minor; - if (eglInitialize(context.glDisplay, &major, &minor) == EGL_FALSE || eglGetError() != EGL_SUCCESS) { - VPE_LOGE("MetadataGeneratorFwk eglInitialize failed!"); - } -} - MetadataGeneratorFwk::MetadataGeneratorFwk(std::shared_ptr openglContext) { if (openglContext != nullptr) { @@ -64,8 +54,9 @@ MetadataGeneratorFwk::~MetadataGeneratorFwk() impl_->Deinit(); impl_ = nullptr; } - if (context.glDisplay != EGL_NO_DISPLAY) { - eglTerminate(context.glDisplay); + if (eglInitializedFwk_) { + AlgorithmEnvironment::Get().DeinitializeGl(); + eglInitializedFwk_ = false; } Extension::ExtensionManager::GetInstance().DecreaseInstance(); } diff --git a/framework/capi/image_processing/detail_enhance_napi_formal.cpp b/framework/capi/image_processing/detail_enhance_napi_formal.cpp index 37e53ef76ef9f121c00d6fdcd28c00f77e5891de..b8bcc1a3d0ef62db85465cae64ecbf5dafa7fa36 100644 --- a/framework/capi/image_processing/detail_enhance_napi_formal.cpp +++ b/framework/capi/image_processing/detail_enhance_napi_formal.cpp @@ -302,6 +302,23 @@ std::unique_ptr VpeNapi::CreateDstPixelMap(PixelMap& source, const Ini return dstPixelMap; } +void VpeNapi::CopySurfaceBufferMetaData(const sptr& srcBuffer, + sptr& destBuffer) +{ + CHECK_AND_RETURN_LOG(srcBuffer != nullptr && destBuffer != nullptr, + "srcBuffer or destBuffer is nullptr"); + std::vector attrInfo{}; + std::vector keys{}; + if (srcBuffer->ListMetadataKeys(keys) == GSERROR_OK && !keys.empty()) { + for (size_t i = 0; i < keys.size(); i++) { + if (srcBuffer->GetMetadata(keys[i], attrInfo) == GSERROR_OK && !attrInfo.empty()) { + destBuffer->SetMetadata(keys[i], attrInfo); + } + attrInfo.clear(); + } + } +} + bool VpeNapi::ConvertPixelmapToSurfaceBuffer(const std::shared_ptr& pixelmap, sptr& bufferImpl) { @@ -369,7 +386,25 @@ std::shared_ptr VpeNapi::PrepareDstPixelMap(napi_env env, DetailEnhanc VPE_LOGD("res:w %{public}d, h %{public}d, -> w %{public}d, h %{public}d", context->inputPixelMap->GetWidth(), 
context->inputPixelMap->GetHeight(), static_cast(context->xArg), static_cast(context->yArg)); - std::unique_ptr outputPtr = context->inputPixelMap->Create(*context->inputPixelMap, opts); + std::unique_ptr outputPtr = nullptr; + if (context->inputPixelMap->GetPixelFormat() == PixelFormat::YCBCR_P010 || + context->inputPixelMap->GetPixelFormat() == PixelFormat::YCRCB_P010) { + opts.pixelFormat = context->inputPixelMap->GetPixelFormat(); + opts.useDMA = true; + ImageInfo srcImageInfo; + context->inputPixelMap->GetImageInfo(srcImageInfo); + opts.alphaType = srcImageInfo.alphaType; + + outputPtr = context->inputPixelMap->Create(opts); + if (outputPtr == nullptr) { + ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_INVALID_VALUE, "create failed"); + return nullptr; + } + OHOS::ColorManager::ColorSpace colorspace = context->inputPixelMap->InnerGetGrColorSpace(); + outputPtr->InnerSetColorSpace(colorspace); + } else { + outputPtr = context->inputPixelMap->Create(*context->inputPixelMap, opts); + } if (outputPtr == nullptr) { ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_INVALID_VALUE, "create failed"); return nullptr; @@ -410,11 +445,6 @@ std::shared_ptr VpeNapi::DetailEnhanceImpl(napi_env env, DetailEnhance VPE_LOGE("context == nullptr"); return nullptr; } - if (context->inputPixelMap->GetPixelFormat() == PixelFormat::YCBCR_P010 || - context->inputPixelMap->GetPixelFormat() == PixelFormat::YCRCB_P010) { - VPE_LOGI("not support P010"); - return context->inputPixelMap; - } if (!InitDetailAlgo(env)) { VPE_LOGE("init algo failed"); ThrowExceptionError(env, IMAGE_PROCESSING_ERROR_CREATE_FAILED, "init algo failed"); @@ -435,6 +465,7 @@ std::shared_ptr VpeNapi::DetailEnhanceImpl(napi_env env, DetailEnhance } auto output = GetSurfaceBufferFromDMAPixelMap(dstPixelMap); auto input = GetSurfaceBufferFromDMAPixelMap(context->inputPixelMap); + CopySurfaceBufferMetaData(input, output); CHECK_AND_RETURN_RET_LOG((g_detailEnh != nullptr && g_detailEnh->Process(input, output) == VPE_ALGO_ERR_OK), nullptr, "process failed"); return dstPixelMap; @@ -714,7 +745,7 @@ bool VpeNapi::ParseDetailImageParameter(napi_env env, napi_callback_info info, N static_cast(contrastContext_->inputPixelMap->GetHeight()) / static_cast(contrastContext_->curPixelmapArea.h)); - VPE_LOGI("res: %{public}d x %{public}d, curArea: %{public}d x %{public}d, ratio:%{puiblic}f, maxRatio:%{public}f", + VPE_LOGI("res: %{public}d x %{public}d, curArea: %{public}d x %{public}d, ratio:%{public}f, maxRatio:%{public}f", contrastContext_->inputPixelMap->GetWidth(), contrastContext_->inputPixelMap->GetHeight(), contrastContext_->curPixelmapArea.w, contrastContext_->curPixelmapArea.h, contrastContext_->curRatio, contrastContext_->maxRatio); diff --git a/framework/capi/image_processing/image_processing.cpp b/framework/capi/image_processing/image_processing.cpp index 72abce22da5c364580e8e8372b99c286ebda381d..f15bfbfc4087b9c22fba465858ee7c4385cb82e0 100644 --- a/framework/capi/image_processing/image_processing.cpp +++ b/framework/capi/image_processing/image_processing.cpp @@ -22,6 +22,7 @@ #include "image_processing_capi_capability.h" #include "image_environment_native.h" #include "image_processing_impl.h" +#include "algorithm_environment.h" using namespace OHOS::Media::VideoProcessingEngine; @@ -47,15 +48,19 @@ ImageProcessing_ErrorCode CallImageProcessing(OH_ImageProcessing* imageProcessor ImageProcessing_ErrorCode OH_ImageProcessing_InitializeEnvironment(void) { - CHECK_AND_RETURN_RET_LOG(ImageProcessingCapiCapability::Get().OpenCLInit() == 
IMAGE_PROCESSING_SUCCESS, - IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "OpenCLInit failed!"); - CHECK_AND_RETURN_RET_LOG(ImageProcessingCapiCapability::Get().OpenGLInit() == IMAGE_PROCESSING_SUCCESS, - IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "OpenGLInit failed!"); + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().InitializeCl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "InitializeCl failed!"); + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().InitializeGl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, "InitializeGl failed!"); return ImageEnvironmentNative::Get().Initialize(); } ImageProcessing_ErrorCode OH_ImageProcessing_DeinitializeEnvironment(void) { + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeCl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeCl failed!"); + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeGl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeGl failed!"); return ImageEnvironmentNative::Get().Deinitialize(); } @@ -104,12 +109,16 @@ bool OH_ImageProcessing_IsMetadataGenerationSupported(const ImageProcessing_Colo ImageProcessing_ErrorCode OH_ImageProcessing_Create(OH_ImageProcessing** imageProcessor, int type) { - return OH_ImageProcessing::Create(imageProcessor, type, ImageProcessingCapiCapability::Get().GetOpenGLContext(), - ImageProcessingCapiCapability::Get().GetClContext()); + return OH_ImageProcessing::Create(imageProcessor, type, AlgorithmEnvironment::Get().GetGLContext(), + AlgorithmEnvironment::Get().GetClContext()); } ImageProcessing_ErrorCode OH_ImageProcessing_Destroy(OH_ImageProcessing* imageProcessor) { + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeCl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeCl failed!"); + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeGl() == VPE_ALGO_ERR_OK, + IMAGE_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeGl failed!"); return OH_ImageProcessing::Destroy(imageProcessor); } diff --git a/framework/capi/image_processing/image_processing_capi_capability.cpp b/framework/capi/image_processing/image_processing_capi_capability.cpp index 7098af828b6c508995a2f3d6eeb88c1f3f3be18c..ad808ead623579ffb3281569bc21ad893e964dae 100644 --- a/framework/capi/image_processing/image_processing_capi_capability.cpp +++ b/framework/capi/image_processing/image_processing_capi_capability.cpp @@ -14,6 +14,7 @@ */ #include "image_processing_capi_capability.h" +#include "algorithm_environment.h" using namespace OHOS::Media::VideoProcessingEngine; @@ -22,43 +23,15 @@ ImageProcessingCapiCapability& ImageProcessingCapiCapability::Get() static ImageProcessingCapiCapability instance{}; return instance; } - -ImageProcessing_ErrorCode ImageProcessingCapiCapability::OpenCLInit() -{ - void *OpenclFoundationHandle = nullptr; - std::string path = "/sys_prod/lib64/VideoProcessingEngine/libaihdr_engine.so"; - auto ret = access(path.c_str(), F_OK); - if (ret != 0) { - VPE_LOGW("access = %d path = %s", ret, path.c_str()); - } else { - constexpr int DEVICE_NAME_LENGTH = 32; // 32 max name length - char deviceName[DEVICE_NAME_LENGTH]; - auto status = SetupOpencl(&OpenclFoundationHandle, "HUA", deviceName); - CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), - IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "GetOpenCLContext SetupOpencl fail!"); - } - openclContext_ = 
reinterpret_cast(OpenclFoundationHandle); - return IMAGE_PROCESSING_SUCCESS; -} - -ImageProcessing_ErrorCode ImageProcessingCapiCapability::OpenGLInit() -{ - auto status = SetupOpengl(openglContext_); - CHECK_AND_RETURN_RET_LOG(status == static_cast(IMAGE_PROCESSING_SUCCESS), - IMAGE_PROCESSING_ERROR_UNSUPPORTED_PROCESSING, - "OpenGLInit SetupOpengl fail!"); - return IMAGE_PROCESSING_SUCCESS; -} - + ClContext* ImageProcessingCapiCapability::GetClContext() { - return openclContext_; + return AlgorithmEnvironment::Get().GetClContext(); } std::shared_ptr ImageProcessingCapiCapability::GetOpenGLContext() { - return openglContext_; + return AlgorithmEnvironment::Get().GetGLContext(); } void ImageProcessingCapiCapability::LoadLibrary() diff --git a/framework/capi/image_processing/include/image_processing_capi_capability.h b/framework/capi/image_processing/include/image_processing_capi_capability.h index 9c7fe7f06422da962125e583e5a43a5dfb9bda8c..183062405c8bcbda0f6c6061fe8225002995338c 100644 --- a/framework/capi/image_processing/include/image_processing_capi_capability.h +++ b/framework/capi/image_processing/include/image_processing_capi_capability.h @@ -106,10 +106,9 @@ public: ImageProcessingCapiCapability(ImageProcessingCapiCapability&&) = delete; ImageProcessingCapiCapability& operator=(ImageProcessingCapiCapability&&) = delete; - ImageProcessing_ErrorCode OpenCLInit(); - ImageProcessing_ErrorCode OpenGLInit(); std::shared_ptr GetOpenGLContext(); ClContext* GetClContext(); + void LoadLibrary(); void UnloadLibrary(); bool CheckColorSpaceConversionSupport( @@ -130,8 +129,6 @@ private: ImageProcessing_ErrorCode LoadAlgo(); - std::shared_ptr openglContext_{nullptr}; - ClContext *openclContext_{nullptr}; std::mutex lock_; int32_t usedInstance_ {0}; void* mLibHandle{}; diff --git a/framework/capi/video_processing/include/video_processing_capi_capability.h b/framework/capi/video_processing/include/video_processing_capi_capability.h index 0b1966aceb82698eb7d4d0afbbeeceeaacd00b2e..767df550dfa3736ae1d8a12bf2d635793cd6bac9 100644 --- a/framework/capi/video_processing/include/video_processing_capi_capability.h +++ b/framework/capi/video_processing/include/video_processing_capi_capability.h @@ -61,10 +61,7 @@ public: static bool IsColorSpaceConversionSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo, const VideoProcessing_ColorSpaceInfo* destinationVideoInfo); static bool IsMetadataGenerationSupported(const VideoProcessing_ColorSpaceInfo* sourceVideoInfo); - static VideoProcessing_ErrorCode OpenGLInit(); static std::shared_ptr GetOpenGLContext(); -private: - static std::shared_ptr openglContext_; }; } // namespace VideoProcessingEngine } // namespace Media diff --git a/framework/capi/video_processing/video_processing.cpp b/framework/capi/video_processing/video_processing.cpp index 2862badb95b4a4c3931dba609e5a44541528f804..945af0f2df80c0193b1fa3100cdb9b0fcf1ec1ce 100644 --- a/framework/capi/video_processing/video_processing.cpp +++ b/framework/capi/video_processing/video_processing.cpp @@ -23,6 +23,7 @@ #include "video_environment_native.h" #include "video_processing_callback_impl.h" #include "video_processing_impl.h" +#include "algorithm_environment.h" using namespace OHOS::Media::VideoProcessingEngine; @@ -62,13 +63,15 @@ VideoProcessing_ErrorCode CallVideoProcessingCallback(VideoProcessing_Callback* VideoProcessing_ErrorCode OH_VideoProcessing_InitializeEnvironment(void) { - CHECK_AND_RETURN_RET_LOG(VideoProcessingCapiCapability::OpenGLInit() == VIDEO_PROCESSING_SUCCESS, - 
VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, "OpenGLInit failed!"); + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().InitializeGl() == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_INITIALIZE_FAILED, "InitializeGl failed!"); return VideoEnvironmentNative::Get().Initialize(); } VideoProcessing_ErrorCode OH_VideoProcessing_DeinitializeEnvironment(void) { + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeGl() == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeGl failed!"); return VideoEnvironmentNative::Get().Deinitialize(); } @@ -85,11 +88,13 @@ bool OH_VideoProcessing_IsMetadataGenerationSupported(const VideoProcessing_Colo VideoProcessing_ErrorCode OH_VideoProcessing_Create(OH_VideoProcessing** videoProcessor, int type) { - return OH_VideoProcessing::Create(videoProcessor, type, VideoProcessingCapiCapability::GetOpenGLContext()); + return OH_VideoProcessing::Create(videoProcessor, type, AlgorithmEnvironment::Get().GetGLContext()); } VideoProcessing_ErrorCode OH_VideoProcessing_Destroy(OH_VideoProcessing* videoProcessor) { + CHECK_AND_RETURN_RET_LOG(AlgorithmEnvironment::Get().DeinitializeGl() == VPE_ALGO_ERR_OK, + VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, "DeinitializeGl failed!"); return OH_VideoProcessing::Destroy(videoProcessor); } diff --git a/framework/capi/video_processing/video_processing_capi_capability.cpp b/framework/capi/video_processing/video_processing_capi_capability.cpp index 5ede00e1829a4afa253ef2a945644df4b67fd4b2..c2091527f92343943c5dc8da985227b5e57fde02 100644 --- a/framework/capi/video_processing/video_processing_capi_capability.cpp +++ b/framework/capi/video_processing/video_processing_capi_capability.cpp @@ -14,6 +14,7 @@ */ #include "video_processing_capi_capability.h" +#include "algorithm_environment.h" #include #include @@ -366,18 +367,7 @@ bool VideoProcessingCapiCapability::IsMetadataGenerationSupported( return false; } -std::shared_ptr VideoProcessingCapiCapability::openglContext_ = nullptr; - -VideoProcessing_ErrorCode VideoProcessingCapiCapability::OpenGLInit() -{ - auto status = SetupOpengl(openglContext_); - CHECK_AND_RETURN_RET_LOG(status == static_cast(VIDEO_PROCESSING_SUCCESS), - VIDEO_PROCESSING_ERROR_OPERATION_NOT_PERMITTED, - "OpenGLInit SetupOpengl fail!"); - return VIDEO_PROCESSING_SUCCESS; -} - std::shared_ptr VideoProcessingCapiCapability::GetOpenGLContext() { - return openglContext_; -} + return AlgorithmEnvironment::Get().GetGLContext(); +} \ No newline at end of file diff --git a/interfaces/inner_api/algorithm_common.h b/interfaces/inner_api/algorithm_common.h index 4fb22970fabdec544584febd2f07247ea686d390..88df8c52d4106311159ed83110507d25d59c1399 100644 --- a/interfaces/inner_api/algorithm_common.h +++ b/interfaces/inner_api/algorithm_common.h @@ -27,7 +27,6 @@ typedef struct OpenGLContext OpenGLContext; typedef struct ClContext ClContext; -extern "C" int SetupOpencl(void **pHandle, const char *vendorName, char *deviceName); namespace OHOS { namespace Media { @@ -149,7 +148,6 @@ struct MetadataGeneratorParameter { uint32_t GetColorSpaceType(const CM_ColorSpaceInfo &colorSpaceInfo); CM_ColorSpaceInfo GetColorSpaceInfo(const uint32_t colorSpaceType); -int SetupOpengl(std::shared_ptr &openglHandle); } // namespace VideoProcessingEngine } // namespace Media } // namespace OHOS diff --git a/interfaces/kits/js/detail_enhance_napi_formal.h b/interfaces/kits/js/detail_enhance_napi_formal.h index 88211cf3231781fdab37dcc37e9b619fd8bb6853..d3e1187ed33b1b4d66c1fccf6c53da645f201cc2 100644 --- 
a/interfaces/kits/js/detail_enhance_napi_formal.h +++ b/interfaces/kits/js/detail_enhance_napi_formal.h @@ -106,6 +106,8 @@ private: sptr& bufferImpl); static std::unique_ptr CreateDstPixelMap(OHOS::Media::PixelMap& source, const OHOS::Media::InitializationOptions& opts); + static void CopySurfaceBufferMetaData(const sptr& srcBuffer, + sptr& destBuffer); static sptr GetSurfaceBufferFromDMAPixelMap( const std::shared_ptr& pixelmap); static napi_value CreateEnumTypeObject(napi_env env, diff --git a/services/BUILD.gn b/services/BUILD.gn index 6f95c1bd501c6f76757910c1667322f4c08053d4..4798dfc8d7c8d20413d04e3dbb6bfabdf3436e6b 100644 --- a/services/BUILD.gn +++ b/services/BUILD.gn @@ -84,6 +84,7 @@ ohos_shared_library("videoprocessingserviceimpl") { "algorithm/video_processing_algorithm_base.cpp", "algorithm/video_processing_algorithm_factory.cpp", "algorithm/video_processing_algorithm_without_data.cpp", + "algorithm/vpe_controller_base.cpp", "utils/configuration_helper.cpp", "utils/surface_buffer_info.cpp", "utils/vpe_sa_utils.cpp", diff --git a/services/algorithm/include/video_processing_algorithm_base.h b/services/algorithm/include/video_processing_algorithm_base.h index 062b17fef91e744e52e044c2211b69fae1021844..3c12c096bca4be22ea237b7d69fd7b12ce1abe58 100644 --- a/services/algorithm/include/video_processing_algorithm_base.h +++ b/services/algorithm/include/video_processing_algorithm_base.h @@ -41,6 +41,8 @@ public: const SurfaceBufferInfo& inputGainmap, SurfaceBufferInfo& outputHdrImage, bool legacy) final; int DecomposeImage(uint32_t clientID, const SurfaceBufferInfo& inputImage, SurfaceBufferInfo& outputSdrImage, SurfaceBufferInfo& outputGainmap) final; + int SetParameter(uint32_t clientID, int tag, const std::vector& parameter) override; + int GetParameter(uint32_t clientID, int tag, std::vector& parameter) override; protected: VideoProcessingAlgorithmBase(const std::string& feature, uint32_t id) : feature_(feature), featureID_(id) {} diff --git a/services/algorithm/include/video_processing_algorithm_factory.h b/services/algorithm/include/video_processing_algorithm_factory.h index 1ff00976ca8d4c8e7875871c51eaf1883d6189ef..76f96ca4a603ea38d32e336a2afc5c4baaf79e60 100644 --- a/services/algorithm/include/video_processing_algorithm_factory.h +++ b/services/algorithm/include/video_processing_algorithm_factory.h @@ -20,6 +20,7 @@ #include #include "ivideo_processing_algorithm.h" +#include "vpe_controller_base.h" namespace OHOS { namespace Media { @@ -34,8 +35,11 @@ public: VideoProcessingAlgorithmFactory& operator=(VideoProcessingAlgorithmFactory&&) = delete; std::shared_ptr Create(const std::string& feature) const; + std::shared_ptr CreateController(const std::uint32_t& feature) const; private: + template + bool LoadDynamic(const std::string& path, const std::string& creatorGetter, T& creators); bool LoadDynamicAlgorithm(const std::string& path); void UnloadDynamicAlgorithm(); void GenerateFeatureIDs(); diff --git a/services/algorithm/include/video_processing_algorithm_factory_common.h b/services/algorithm/include/video_processing_algorithm_factory_common.h index 61d811c3749d43719b44c7d456a152e998a7d87a..613b58e9b9d40de2776aab22e48c3c8aef883e29 100644 --- a/services/algorithm/include/video_processing_algorithm_factory_common.h +++ b/services/algorithm/include/video_processing_algorithm_factory_common.h @@ -22,11 +22,13 @@ #include #include "ivideo_processing_algorithm.h" +#include "vpe_controller_base.h" namespace OHOS { namespace Media { namespace VideoProcessingEngine { using 
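Note: CopySurfaceBufferMetaData is only declared in the NAPI header above; its definition is elsewhere and not part of this hunk. The following is a hedged sketch of what such a helper could do, assuming it forwards a fixed set of well-known metadata keys; the key list and the use of GetMetadata/SetMetadata are assumptions.

// Illustrative sketch; the real CopySurfaceBufferMetaData body is not in this patch.
// ATTRKEY_COLORSPACE_INFO / ATTRKEY_HDR_METADATA_TYPE come from the display HDI
// common types, as used by the unit-test helpers later in this patch.
#include <cstdint>
#include <vector>
#include "surface_buffer.h"

namespace {
void CopySurfaceBufferMetaDataSketch(const OHOS::sptr<OHOS::SurfaceBuffer>& srcBuffer,
    OHOS::sptr<OHOS::SurfaceBuffer>& destBuffer)
{
    if (srcBuffer == nullptr || destBuffer == nullptr) {
        return;
    }
    // Assumed key set; a real implementation may copy more entries.
    const uint32_t keys[] = { ATTRKEY_COLORSPACE_INFO, ATTRKEY_HDR_METADATA_TYPE };
    for (uint32_t key : keys) {
        std::vector<uint8_t> value;
        if (srcBuffer->GetMetadata(key, value) == OHOS::GSERROR_OK) {
            (void)destBuffer->SetMetadata(key, value);
        }
    }
}
} // namespace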
VpeAlgorithmCreator = std::function(const std::string&, uint32_t)>; +using VpeControllerCreator = std::function(void)>; struct VpeAlgorithmCreatorInfo { uint32_t id; @@ -34,6 +36,7 @@ struct VpeAlgorithmCreatorInfo { }; using VpeAlgorithmCreatorMap = std::unordered_map; +using VpeControllerCreatorMap = std::unordered_map; // NOTE: // All algorithms MUST be derived from VideoProcessingAlgorithmWithData or VideoProcessingAlgorithmWithoutData. @@ -44,6 +47,12 @@ std::shared_ptr CreateVpeAlgorithm(const std::string& return std::make_shared(feature, id); } +template +std::shared_ptr CreateVpeController() +{ + return std::make_shared(); +} + template VpeAlgorithmCreatorInfo MakeCreator() { diff --git a/services/algorithm/include/vpe_controller_base.h b/services/algorithm/include/vpe_controller_base.h new file mode 100644 index 0000000000000000000000000000000000000000..091daba073b7a8fd74bb67d8a1e50e832e7095de --- /dev/null +++ b/services/algorithm/include/vpe_controller_base.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_PROCESSING_ENGINE_BASE_CONTROLLER_H +#define VIDEO_PROCESSING_ENGINE_BASE_CONTROLLER_H + +#include + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class VpeControllerBase { +public: + virtual int SetFeatureParameter(int32_t tag, const std::vector& parameter); + virtual int GetFeatureParameter(int32_t tag, std::vector& parameter); + +protected: + VpeControllerBase() = default; + virtual ~VpeControllerBase() = default; + VpeControllerBase(const VpeControllerBase&) = delete; + VpeControllerBase& operator=(const VpeControllerBase&) = delete; + VpeControllerBase(VpeControllerBase&&) = delete; + VpeControllerBase& operator=(VpeControllerBase&&) = delete; +}; + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VIDEO_PROCESSING_ENGINE_BASE_CONTROLLER_H diff --git a/services/algorithm/video_processing_algorithm_base.cpp b/services/algorithm/video_processing_algorithm_base.cpp index 2e517565f8af132af24a3d89a2d72a864af839b2..459f410f77e442c0d4adcc1eba3e3cb2401e4fbc 100644 --- a/services/algorithm/video_processing_algorithm_base.cpp +++ b/services/algorithm/video_processing_algorithm_base.cpp @@ -79,7 +79,7 @@ int VideoProcessingAlgorithmBase::Add(const std::string& clientName, uint32_t& c int VideoProcessingAlgorithmBase::Del(uint32_t clientID) { std::lock_guard lock(lock_); - bool isEmpty; + bool isEmpty = true; int ret = DelClientIDLocked(clientID, isEmpty); CHECK_AND_RETURN_RET_LOG(ret == VPE_ALGO_ERR_OK, ret, "Failed to del client(%{public}u)!", clientID); if (isEmpty) { @@ -127,6 +127,18 @@ int VideoProcessingAlgorithmBase::DecomposeImage(uint32_t clientID, return DoDecomposeImage(clientID, inputImage, outputSdrImage, outputGainmap); } +int VideoProcessingAlgorithmBase::SetParameter([[maybe_unused]] uint32_t clientID, [[maybe_unused]] int tag, + [[maybe_unused]] const std::vector& parameter) +{ + return 0; +} + +int 
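Note: VpeControllerBase ships only no-op defaults and this patch adds no concrete controller. For illustration, a hypothetical controller derived from it, assuming the parameter payload is a byte vector; the class name, feature id, and error value are invented.

// Hypothetical example; no concrete controller is added by this patch.
#include <cstdint>
#include <unordered_map>
#include <vector>
#include "vpe_controller_base.h"

namespace OHOS {
namespace Media {
namespace VideoProcessingEngine {

class SampleController : public VpeControllerBase {
public:
    SampleController() = default;
    ~SampleController() override = default;

    int SetFeatureParameter(int32_t tag, const std::vector<uint8_t>& parameter) override
    {
        // Cache the raw parameter blob per tag; a real controller would validate it.
        parameters_[tag] = parameter;
        return 0;
    }

    int GetFeatureParameter(int32_t tag, std::vector<uint8_t>& parameter) override
    {
        auto it = parameters_.find(tag);
        if (it == parameters_.end()) {
            return -1;   // illustrative error value only
        }
        parameter = it->second;
        return 0;
    }

private:
    std::unordered_map<int32_t, std::vector<uint8_t>> parameters_ {};
};

// Registration (in g_controllerCreators or a dynamic creator map) would then be
// something like: { SAMPLE_FEATURE_ID, CreateVpeController<SampleController> }.

} // namespace VideoProcessingEngine
} // namespace Media
} // namespace OHOS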
VideoProcessingAlgorithmBase::GetParameter([[maybe_unused]] uint32_t clientID, [[maybe_unused]] int tag, + [[maybe_unused]] std::vector& parameter) +{ + return 0; +} + int VideoProcessingAlgorithmBase::OnInitializeLocked() { return VPE_ALGO_ERR_OK; diff --git a/services/algorithm/video_processing_algorithm_factory.cpp b/services/algorithm/video_processing_algorithm_factory.cpp index 705dada2827c52571809a42feda7908ca295697d..6ebc5b1bf3c838d8c20ca4860b76eb21a94c0c4f 100644 --- a/services/algorithm/video_processing_algorithm_factory.cpp +++ b/services/algorithm/video_processing_algorithm_factory.cpp @@ -35,6 +35,11 @@ VpeAlgorithmCreatorMap g_creators = { // algorithm begin // algorithm end }; +VpeControllerCreatorMap g_controllerCreators = { + // NOTE: Add static controller which would be called by VPE SA below: + // controller begin + // controller end +}; } VideoProcessingAlgorithmFactory::VideoProcessingAlgorithmFactory() @@ -60,35 +65,55 @@ std::shared_ptr VideoProcessingAlgorithmFactory::Crea return it->second.creator(feature, it->second.id); } -bool VideoProcessingAlgorithmFactory::LoadDynamicAlgorithm(const std::string& path) +std::shared_ptr VideoProcessingAlgorithmFactory::CreateController(const std::uint32_t& feature) const { - handle_ = dlopen(path.c_str(), RTLD_NOW); - if (handle_ == nullptr) { - VPE_LOGD("Can't open library '%{public}s' - %{public}s", path.c_str(), dlerror()); - return false; + auto it = g_controllerCreators.find(feature); + if (it == g_controllerCreators.end()) { + return nullptr; } + return it->second(); +} - using GetCreator = VpeAlgorithmCreatorMap* (*)(); - auto getCreator = reinterpret_cast(dlsym(handle_, "GetDynamicAlgorithmCreator")); - if (getCreator == nullptr) { - VPE_LOGD("Failed to locate GetDynamicAlgorithmCreator in '%{public}s' - %{public}s", path.c_str(), dlerror()); +template +bool VideoProcessingAlgorithmFactory::LoadDynamic(const std::string& path, const std::string& creatorGetter, + T& creators) +{ + if (handle_ == nullptr) { + handle_ = dlopen(path.c_str(), RTLD_NOW); + if (handle_ == nullptr) { + VPE_LOGD("Can't open library '%{public}s' - %{public}s", path.c_str(), dlerror()); + return false; + } + } + using GetDynamicCreator = T* (*)(); + auto getDynamicCreator = reinterpret_cast(dlsym(handle_, creatorGetter.c_str())); + if (getDynamicCreator == nullptr) { + VPE_LOGD("Failed to locate %{public}s in '%{public}s' - %{public}s", + creatorGetter.c_str(), path.c_str(), dlerror()); return false; } - auto dynamicAlgorithms = getCreator(); - if (dynamicAlgorithms == nullptr) { - VPE_LOGD("Failed to GetDynamicAlgorithmCreator() from '%{public}s'", path.c_str()); + auto dynamicCreators = getDynamicCreator(); + if (dynamicCreators == nullptr) { + VPE_LOGD("Failed to %{public}s() from '%{public}s'", creatorGetter.c_str(), path.c_str()); return false; } - auto staticSize = g_creators.size(); - auto dynamicSize = dynamicAlgorithms->size(); - g_creators.merge(*dynamicAlgorithms); - VPE_LOGI("Algorithms: { static:%{public}zu + dynamic:%{public}zu -> total:%{public}zu }", - staticSize, dynamicSize, g_creators.size()); + auto staticSize = creators.size(); + auto dynamicSize = dynamicCreators->size(); + creators.merge(*dynamicCreators); + VPE_LOGI("%{public}s: { static:%{public}zu + dynamic:%{public}zu -> total:%{public}zu }", + creatorGetter.c_str(), staticSize, dynamicSize, creators.size()); + return true; } +bool VideoProcessingAlgorithmFactory::LoadDynamicAlgorithm(const std::string& path) +{ + return LoadDynamic(path, "GetDynamicAlgorithmCreator", 
g_creators) && + LoadDynamic(path, "GetDynamicControllerCreator", g_controllerCreators); +} + void VideoProcessingAlgorithmFactory::UnloadDynamicAlgorithm() { if (handle_ != nullptr) { diff --git a/services/algorithm/vpe_controller_base.cpp b/services/algorithm/vpe_controller_base.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c995d194fd3fddc0913463820d02ed5878324a9b --- /dev/null +++ b/services/algorithm/vpe_controller_base.cpp @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "vpe_controller_base.h" + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +int VpeControllerBase::SetFeatureParameter([[maybe_unused]] int32_t tag, + [[maybe_unused]]const std::vector& parameter) +{ + return 0; +} + +int VpeControllerBase::GetFeatureParameter([[maybe_unused]] int32_t tag, + [[maybe_unused]] std::vector& parameter) +{ + return 0; +} diff --git a/services/include/ivideo_processing_server_listener.h b/services/include/ivideo_processing_server_listener.h new file mode 100644 index 0000000000000000000000000000000000000000..3ee9d2002811408b02fa3c4464406efa2ab95932 --- /dev/null +++ b/services/include/ivideo_processing_server_listener.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
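Note: with the refactored LoadDynamic, the library passed to LoadDynamicAlgorithm is expected to export both GetDynamicAlgorithmCreator and GetDynamicControllerCreator, each returning a pointer to a creator map that the factory merges into its static tables. A sketch of the controller-side export, assuming C linkage so the dlsym lookup by plain name succeeds; the algorithm-side getter follows the same shape.

// Sketch of a plugin-side export; concrete controller types are placeholders.
#include "video_processing_algorithm_factory_common.h"

namespace {
using namespace OHOS::Media::VideoProcessingEngine;

VpeControllerCreatorMap g_dynamicControllerCreators = {
    // { SAMPLE_DYNAMIC_FEATURE_ID, CreateVpeController<SampleDynamicController> },
};
} // namespace

extern "C" OHOS::Media::VideoProcessingEngine::VpeControllerCreatorMap* GetDynamicControllerCreator()
{
    // The factory's LoadDynamic() calls this through a T* (*)() pointer and
    // merges the returned map into g_controllerCreators.
    return &g_dynamicControllerCreators;
}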
+ */ + +#ifndef VPE_VIDEO_PROCESSING_SERVER_LISTENER_H +#define VPE_VIDEO_PROCESSING_SERVER_LISTENER_H + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { +class IVideoProcessingServerListener { +public: + virtual void OnServerDied() = 0; + +protected: + IVideoProcessingServerListener() = default; + virtual ~IVideoProcessingServerListener() = default; + IVideoProcessingServerListener(const IVideoProcessingServerListener&) = delete; + IVideoProcessingServerListener& operator=(const IVideoProcessingServerListener&) = delete; + IVideoProcessingServerListener(IVideoProcessingServerListener&&) = delete; + IVideoProcessingServerListener& operator=(IVideoProcessingServerListener&&) = delete; +}; +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS + +#endif // VPE_VIDEO_PROCESSING_SERVER_LISTENER_H \ No newline at end of file diff --git a/services/include/video_processing_client.h b/services/include/video_processing_client.h index c2df15ab479d58b4692c0a3a208efe12a9ea52b7..d2487481510abdcf05a307311b07f940e892be20 100644 --- a/services/include/video_processing_client.h +++ b/services/include/video_processing_client.h @@ -20,16 +20,20 @@ #include #include #include +#include #include #include +#include #include +#include "ffrt_inner.h" #include "ipc_types.h" #include "iremote_object.h" #include "refbase.h" #include "system_ability_load_callback_stub.h" #include "algorithm_errors.h" +#include "ivideo_processing_server_listener.h" #include "surface_buffer_info.h" #include "video_processing_service_manager_proxy.h" #include "vpe_log.h" @@ -178,6 +182,20 @@ public: VPEAlgoErrCode DecomposeImage(uint32_t clientID, const SurfaceBufferInfo& inputImage, SurfaceBufferInfo& outputSdrImage, SurfaceBufferInfo& outputGainmap); + /* + * @brief Register listener for VPE SA status or data changing. + * @param listener The listener to be registered. + * @return VPE_ALGO_ERR_OK if creation is successful. Other values if failed. See algorithm_errors.h. + */ + VPEAlgoErrCode RegisterServerListener(const std::shared_ptr& listener); + + /* + * @brief Unregister listener for VPE SA status or data changing. + * @param listener The listener to be unregistered. + * @return VPE_ALGO_ERR_OK if creation is successful. Other values if failed. See algorithm_errors.h. 
+ */ + VPEAlgoErrCode UnregisterServerListener(const std::shared_ptr& listener); + private: // Inner callback class for SA loading class LoadCallback : public SystemAbilityLoadCallbackStub { @@ -220,6 +238,10 @@ private: VideoProcessingManager(VideoProcessingManager&&) = delete; VideoProcessingManager& operator=(VideoProcessingManager&&) = delete; + bool OnInitialized(); + bool OnDeinitialized(); + bool CreateSaTaskQueue(); + bool CreateSaTaskQueueInner(); sptr GetService(); void OnSaLoad(const sptr& remoteObject); void OnSaDied(const wptr& remoteObject); @@ -227,14 +249,27 @@ private: const LogInfo& logInfo); void ClearSa(); void ClearSaLocked(); + bool IsTaskQueueValid(); + void Submit(std::function&& task); + void NotifyListeners( + std::function&)>&& operation); + + std::atomic refCount_{}; std::condition_variable cvProxy_{}; std::mutex lock_{}; // Guarded by lock_ begin std::atomic isLoading_{}; sptr proxy_{}; + sptr observer_{}; // Guarded by lock_ end - std::atomic deadRetryCount_{}; + + ffrt::mutex listenerLock_{}; + // Guarded by listenerLock_ begin + std::atomic hasTaskQueue_{}; + std::unique_ptr taskQueue_{}; + std::unordered_map> listeners_{}; + // Guarded by listenerLock_ end }; } // namespace VideoProcessingEngine } // namespace Media diff --git a/services/src/video_processing_client.cpp b/services/src/video_processing_client.cpp index 96cf6f05c6a7ceae92047d847af1664a4e84a7da..24847a8d91167c8b36b76c62cbe5f0ef485ad3ad 100644 --- a/services/src/video_processing_client.cpp +++ b/services/src/video_processing_client.cpp @@ -29,7 +29,7 @@ using VpeSa = IVideoProcessingServiceManager; namespace { std::mutex g_proxyLock; constexpr int ERROR_DEAD_REPLY = 29189; -constexpr int DEAD_RETRY_COUNT = 5; +const std::string SA_TASK_QUEUE_THREAD = "vpe_sa_cb_queue"; } VideoProcessingManager& VideoProcessingManager::GetInstance() @@ -42,11 +42,22 @@ void VideoProcessingManager::Connect() { VPE_LOGD("call GetService"); GetService(); + if (refCount_.load() == 0) { + OnInitialized(); + } + refCount_++; + VPE_LOGD("refCount_=%d", refCount_.load()); } void VideoProcessingManager::Disconnect() { VPE_LOGD("VideoProcessingManager Disconnect!"); + refCount_--; + if (refCount_ <= 0) { + OnDeinitialized(); + refCount_ = 0; + } + VPE_LOGD("refCount_=%d", refCount_.load()); return; } @@ -136,6 +147,94 @@ VPEAlgoErrCode VideoProcessingManager::DecomposeImage(uint32_t clientID, const S VPE_LOG_INFO); } +VPEAlgoErrCode VideoProcessingManager::RegisterServerListener( + const std::shared_ptr& listener) +{ + CHECK_AND_RETURN_RET_LOG(listener != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Invalid input: listener is null!"); + std::lock_guard listenerLock(listenerLock_); + auto id = reinterpret_cast(listener.get()); + auto it = listeners_.find(id); + if (it != listeners_.end()) [[unlikely]] { + VPE_LOGW("Invalid input: listener is already exist!"); + return VPE_ALGO_ERR_OK; + } + listeners_[id] = listener; + VPE_LOGD("Add new listener, size=%{public}zu", listeners_.size()); + return VPE_ALGO_ERR_OK; +} + +VPEAlgoErrCode VideoProcessingManager::UnregisterServerListener( + const std::shared_ptr& listener) +{ + CHECK_AND_RETURN_RET_LOG(listener != nullptr, VPE_ALGO_ERR_INVALID_VAL, "Invalid input: listener is null!"); + std::lock_guard listenerLock(listenerLock_); + auto id = reinterpret_cast(listener.get()); + auto it = listeners_.find(id); + if (it == listeners_.end()) [[unlikely]] { + VPE_LOGW("Invalid input: listener is NOT exist!"); + return VPE_ALGO_ERR_OK; + } + listeners_.erase(it); + VPE_LOGD("Delete listener, 
size=%{public}zu", listeners_.size()); + return VPE_ALGO_ERR_OK; +} + +bool VideoProcessingManager::OnInitialized() +{ + return CreateSaTaskQueue(); +} + +bool VideoProcessingManager::OnDeinitialized() +{ + { + std::lock_guard listenerLock(listenerLock_); + if (taskQueue_ != nullptr) { + listeners_.clear(); + taskQueue_ = nullptr; + } + hasTaskQueue_.store(false); + } + std::lock_guard lock(lock_); + bool ret = true; + do { + if (proxy_ == nullptr) { + break; + } + auto remoteObject = proxy_->AsObject(); + if (remoteObject == nullptr) { + VPE_LOGE("remote is null and force to set proxy_ and observer_ to null!"); + ret = false; + break; + } + if (!remoteObject->RemoveDeathRecipient(observer_)) { + VPE_LOGE("Failed to RemoveDeathRecipient!"); + ret = false; + break; + } + } while (false); + observer_ = nullptr; + proxy_ = nullptr; + return ret; +} + +bool VideoProcessingManager::CreateSaTaskQueue() +{ + std::lock_guard listenerLock(listenerLock_); + return CreateSaTaskQueueInner(); +} + +bool VideoProcessingManager::CreateSaTaskQueueInner() +{ + if (taskQueue_ != nullptr) { + return true; + } + taskQueue_ = std::make_unique(SA_TASK_QUEUE_THREAD.c_str(), + ffrt::queue_attr().qos(ffrt::qos_user_initiated)); + CHECK_AND_RETURN_RET_LOG(taskQueue_ != nullptr, false, "Failed to create task queue!"); + hasTaskQueue_.store(true); + return true; +} + sptr VideoProcessingManager::GetService() { do { @@ -194,6 +293,7 @@ void VideoProcessingManager::OnSaLoad(const sptr& remoteObject) CHECK_AND_RETURN_LOG(observer != nullptr, "Failed to create DeathObserver!"); CHECK_AND_RETURN_LOG(remoteObject->AddDeathRecipient(observer), "Failed to AddDeathRecipient!"); VPE_LOGD("AddDeathRecipient success."); + observer_ = observer; proxy_ = iface_cast(remoteObject); VPE_LOGI("SA load success."); } else { @@ -207,8 +307,7 @@ void VideoProcessingManager::OnSaLoad(const sptr& remoteObject) void VideoProcessingManager::OnSaDied([[maybe_unused]] const wptr& remoteObject) { - std::lock_guard lock(lock_); - proxy_ = nullptr; + ClearSa(); } VPEAlgoErrCode VideoProcessingManager::Execute( @@ -222,13 +321,6 @@ VPEAlgoErrCode VideoProcessingManager::Execute( auto err = static_cast(operation(proxy)); if (err == ERROR_DEAD_REPLY) { ClearSa(); - if (deadRetryCount_.load() < DEAD_RETRY_COUNT) { - deadRetryCount_++; - VPE_ORG_LOGD(logInfo, "<%{public}d> Retry to check SA again for dead reply.", deadRetryCount_.load()); - return Execute(std::move(operation), logInfo); - } - } else { - deadRetryCount_.store(0); } return err; } @@ -243,6 +335,50 @@ void VideoProcessingManager::ClearSaLocked() { proxy_ = nullptr; isLoading_ = false; + Submit( + [this] { + NotifyListeners( + [](const std::shared_ptr& listener) { + VPE_LOGD("Call listener->OnServerDied()"); + return listener->OnServerDied(); + }); + }); +} + +bool VideoProcessingManager::IsTaskQueueValid() +{ + if (hasTaskQueue_.load()) [[likely]] { + return true; + } + return CreateSaTaskQueueInner(); +} + +void VideoProcessingManager::Submit(std::function&& task) +{ + std::lock_guard listenerLock(listenerLock_); + CHECK_AND_RETURN_LOG(IsTaskQueueValid(), "Task queue is invalid!"); + taskQueue_->submit(std::move(task)); +} + +void VideoProcessingManager::NotifyListeners( + std::function&)>&& operation) +{ + std::lock_guard listenerLock(listenerLock_); + VPE_LOGD("+ listeners.size=%{public}zu", listeners_.size()); + std::vector invalidIds{}; + for (auto& [id, weak] : listeners_) { + auto listener = weak.lock(); + if (listener == nullptr) { + invalidIds.push_back(id); + 
VPE_LOGD("%{public}zu listeners are expired.", invalidIds.size()); + continue; + } + operation(listener); + } + for (auto& id : invalidIds) { + listeners_.erase(id); + } + VPE_LOGD("- listeners.size=%{public}zu", listeners_.size()); } void VideoProcessingManager::LoadCallback::OnLoadSystemAbilitySuccess([[maybe_unused]] int32_t systemAbilityId, diff --git a/test/fuzztest/services_fuzzer/BUILD.gn b/test/fuzztest/services_fuzzer/BUILD.gn index 0ccbacfb61c3b4f39f9066afa771b184f11c9133..8ab1af6aaf6e9c43e44cd7658e84b20c008fc381 100644 --- a/test/fuzztest/services_fuzzer/BUILD.gn +++ b/test/fuzztest/services_fuzzer/BUILD.gn @@ -42,6 +42,7 @@ ohos_fuzztest("ServicesFuzzTest") { external_deps = [ "c_utils:utils", + "ffrt:libffrt", "graphic_surface:surface", "hilog:libhilog", "ipc:ipc_single", diff --git a/test/moduletest/colorspace_converter/csc_module_test.cpp b/test/moduletest/colorspace_converter/csc_module_test.cpp index f2de8d9ed2c719470278dcc2d37c57d99702c57b..55d767ac6fd5453419624d3f0cb61007466ee617 100644 --- a/test/moduletest/colorspace_converter/csc_module_test.cpp +++ b/test/moduletest/colorspace_converter/csc_module_test.cpp @@ -70,7 +70,7 @@ sptr CSCModuleTest::PrepareOneFrame() * @tc.func : Create * @tc.desc : Test for ColorSpaceConverter Create */ -HWTEST_F(CSCModuleTest, Create_0101, TestSize.Level1) +HWTEST_F(CSCModuleTest, Create_0101, TestSize.Level0) { auto plugin = ColorSpaceConverter::Create(); ASSERT_NE(nullptr, plugin); diff --git a/test/moduletest/colorspace_converter_video/api_test.cpp b/test/moduletest/colorspace_converter_video/api_test.cpp index 75dbc85bd88c03a60e50b74ea120536bcb8e4314..65877d485f6a54b13ef7bb2a79b3c54f15a3c9db 100644 --- a/test/moduletest/colorspace_converter_video/api_test.cpp +++ b/test/moduletest/colorspace_converter_video/api_test.cpp @@ -182,7 +182,7 @@ uint32_t CSCVInnerApiTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) * @tc.name : release output buffer api with illegal parameter * @tc.desc : function test */ -HWTEST_F(CSCVInnerApiTest, CSCV_API_0010, TestSize.Level2) +HWTEST_F(CSCVInnerApiTest, CSCV_API_0010, TestSize.Level1) { int32_t ret = 0; std::shared_ptr cscv2; diff --git a/test/moduletest/colorspace_converter_video/state_test.cpp b/test/moduletest/colorspace_converter_video/state_test.cpp index 2ea5f97eebd8ffa7f31005a896793932644065f8..be4702e9e8c4efe69651d3d3fbea68f9f8e96e62 100644 --- a/test/moduletest/colorspace_converter_video/state_test.cpp +++ b/test/moduletest/colorspace_converter_video/state_test.cpp @@ -162,7 +162,7 @@ void CSCVInnerStateTest::AllPrepareFunc() * @tc.name : call all combination of prepare-state func * @tc.desc : state test */ -HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0010, TestSize.Level1) +HWTEST_F(CSCVInnerStateTest, CSCV_STATE_0010, TestSize.Level0) { std::vector nums = {1, 2, 3, 4}; int32_t ret = 0; diff --git a/test/moduletest/metadata_generator/mg_module_test.cpp b/test/moduletest/metadata_generator/mg_module_test.cpp index a6d7c416b7119d600781cf280427cff62070e78f..121afdf31ff0eb5e07ee7f6a51402a16f2f861ee 100644 --- a/test/moduletest/metadata_generator/mg_module_test.cpp +++ b/test/moduletest/metadata_generator/mg_module_test.cpp @@ -75,7 +75,7 @@ sptr MGModuleTest::CreateSurfaceBuffer() * @tc.func : Create * @tc.desc : Test for MetadataGenerator Create */ -HWTEST_F(MGModuleTest, Create_0101, TestSize.Level1) +HWTEST_F(MGModuleTest, Create_0101, TestSize.Level0) { auto plugin = MetadataGenerator::Create(); ASSERT_NE(nullptr, plugin); diff --git 
a/test/moduletest/metadata_generator_video/state_test.cpp b/test/moduletest/metadata_generator_video/state_test.cpp index af3afd1e036b181de4e8a4120f7a8122e418aab3..5cb0376f285670961ca7ba69c43c64f1ecf72e36 100644 --- a/test/moduletest/metadata_generator_video/state_test.cpp +++ b/test/moduletest/metadata_generator_video/state_test.cpp @@ -141,7 +141,7 @@ void MDGInnerStateTest::AllPrepareFunc() * @tc.name : call all combination of prepare-state func * @tc.desc : state test */ -HWTEST_F(MDGInnerStateTest, CSCV_STATE_0010, TestSize.Level1) +HWTEST_F(MDGInnerStateTest, CSCV_STATE_0010, TestSize.Level0) { std::vector nums = {1, 2, 3, 4}; int32_t ret = 0; diff --git a/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp b/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp index 86b68415888ebb99ff281262c0cbd606b8df95d0..6aa6ee52b0e1a9499b5c56c9a635c8d0b44d5523 100644 --- a/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp +++ b/test/nativedemo/vpe_demo/detail_enhancer_demo.cpp @@ -36,10 +36,6 @@ using namespace Media; using namespace VideoProcessingEngine; namespace { -const float SIZE_COEF_YUV420 = 1.5; -const float SIZE_COEF_RGBA8888 = 4; -const float SIZE_COEF_YUV444 = 3; - std::shared_ptr DetailEnhancerImageCreate() { auto detailEnh = DetailEnhancerImage::Create(); @@ -75,26 +71,7 @@ std::string GetFormatName(int32_t format) return formatName; } -int32_t GetFileSize(int32_t width, int32_t height, int32_t format) -{ - int32_t size = width * height; - switch (format) { - case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: - case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: - size = size * SIZE_COEF_YUV420; - break; - case OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: - case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: - case OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102: - size *= SIZE_COEF_RGBA8888; - break; - default: - TEST_LOG("Unknow format:%d", format); - size *= SIZE_COEF_YUV444; - break; - } - return size; -} + int32_t GetImageType(int32_t format) { @@ -138,7 +115,7 @@ void Process(std::shared_ptr detailEnh, std::string_view in TEST_LOG("inputFile:%s", inputFile.data()); std::unique_ptr yuvFile = std::make_unique(inputFile.data(), std::ios::binary | std::ios::in); - ReadYuvFile(input, yuvFile, GetFileSize(inputWidth, inputHeight, inputFormat)); + ReadYuvFile(input, yuvFile, inputWidth, inputHeight, inputFormat, input->GetStride()); auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); yuvFile->seekg(0); yuvFile->close(); @@ -150,9 +127,7 @@ void Process(std::shared_ptr detailEnh, std::string_view in } if (needDump) { std::unique_ptr outputImage = std::make_unique( - "/data/test/media/output/" + GetFormatName(inputFormat) + "To" + GetFormatName(outputFormat) + "_" + - std::to_string(outputWidth) + "x" + std::to_string(outputHeight) + "_" + - std::to_string(output->GetStride()) + ".yuv", + "/data/out/output.raw", std::ios::binary | std::ios::out | std::ios::trunc); outputImage->write(static_cast(output->GetVirAddr()), output->GetSize()); outputImage->close(); @@ -237,7 +212,6 @@ int32_t main([[maybe_unused]]int argc, char* argv[]) DetailEnhancerParameters param { .uri = "", .level = static_cast(levelToProcess), - .forceEve = 1, }; if (detailEnh->SetParameter(param)!= VPE_ALGO_ERR_OK) { printf("Init failed!"); @@ -254,7 +228,7 @@ int32_t main([[maybe_unused]]int argc, char* argv[]) std::string_view inputFile = inputFilePath; std::unique_ptr yuvFile = std::make_unique(inputFile.data(), std::ios::binary | std::ios::in); - ReadYuvFile(input, yuvFile, GetFileSize(inputWidth, inputHeight, GetImageType(pixelFormat))); + 
ReadYuvFile(input, yuvFile, inputWidth, inputHeight, GetImageType(pixelFormat), input->GetStride()); auto output = CreateSurfaceBuffer(GetImageType(pixelFormat), outputWidth, outputHeight); RunWithSo(input, output, static_cast(levelToProcess)); yuvFile->close(); diff --git a/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp b/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp index 3e6aa6f38fc8b693fec44cf2e83b2f55143eaf1b..2aae76937a494f4c8e86fee4f20deb389a8e56fa 100644 --- a/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp +++ b/test/unittest/aihdr_enhancer_video/aihdr_enhancer_video_unit_test.cpp @@ -29,6 +29,7 @@ #include "aihdr_enhancer_video_impl.h" #include "aihdr_enhancer_video.h" #include "surface/window.h" +#include "securec.h" #include "external_window.h" using namespace std; @@ -37,6 +38,7 @@ constexpr int64_t NANOS_IN_SECOND = 1000000000L; constexpr int64_t NANOS_IN_MICRO = 1000L; constexpr uint32_t DEFAULT_WIDTH = 1920; constexpr uint32_t DEFAULT_HEIGHT = 1080; +constexpr uint32_t DEFAULT_BYTE = 32; namespace OHOS { namespace Media { @@ -76,6 +78,12 @@ public: static void TearDownTestCase(void); void SetUp(); void TearDown(); + void InitBufferConfig(); + OHNativeWindow *nativeWindow1; + BufferFlushConfig flushCfg_{}; + BufferRequestConfig requestCfg_{}; + GSError SetMeatadata(sptr &buffer, uint32_t value); + GSError SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo); sptr surface; OHNativeWindow *nativeWindow; uint32_t FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer); @@ -110,6 +118,49 @@ int64_t GetSystemTime() return nanoTime / NANOS_IN_MICRO; } +void AihdrEnhancerVideoUnitTest::InitBufferConfig() +{ + requestCfg_.usage = + BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_HW_RENDER | BUFFER_USAGE_HW_TEXTURE; + requestCfg_.width = DEFAULT_WIDTH; + requestCfg_.height = DEFAULT_HEIGHT; + requestCfg_.timeout = 0; + requestCfg_.strideAlignment = DEFAULT_BYTE; + flushCfg_.damage.x = 0; + flushCfg_.damage.y = 0; + flushCfg_.damage.w = DEFAULT_WIDTH; + flushCfg_.damage.h = DEFAULT_HEIGHT; + flushCfg_.timestamp = 0; +} + +GSError AihdrEnhancerVideoUnitTest::SetMeatadata(sptr &buffer, uint32_t value) +{ + if (buffer == nullptr) { + return GSERROR_INVALID_ARGUMENTS; + } + std::vector metadata; + metadata.resize(sizeof(value)); + if (memcpy_s(metadata.data(), metadata.size(), &value, sizeof(value)) != EOK) { + return GSERROR_MEM_OPERATION_ERROR; + } + GSError err = buffer->SetMetadata(ATTRKEY_HDR_METADATA_TYPE, metadata); + return err; +} + +GSError AihdrEnhancerVideoUnitTest::SetMeatadata(sptr &buffer, CM_ColorSpaceInfo &colorspaceInfo) +{ + if (buffer == nullptr) { + return GSERROR_INVALID_ARGUMENTS; + } + std::vector metadata; + metadata.resize(sizeof(CM_ColorSpaceInfo)); + if (memcpy_s(metadata.data(), metadata.size(), &colorspaceInfo, sizeof(CM_ColorSpaceInfo)) != EOK) { + return GSERROR_MEM_OPERATION_ERROR; + } + GSError err = buffer->SetMetadata(ATTRKEY_COLORSPACE_INFO, metadata); + return err; +} + uint32_t AihdrEnhancerVideoUnitTest::FlushSurf(OHNativeWindowBuffer *ohNativeWindowBuffer) { struct Region region; @@ -398,6 +449,266 @@ HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_17, TestSize.Level1) } aihdrEnhancerVideo2->Release(); aihdrEnhancerVideo3->Release(); + aev->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_19, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr ahe2; + OHNativeWindow 
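Note: the demo now calls ReadYuvFile with explicit width, height, format, and stride instead of a pre-computed byte count; the helper's updated body is not part of this hunk. A rough sketch of a stride-aware reader under that assumed signature; the plane handling covers only a 4:2:0 semi-planar layout and is illustrative.

// Sketch only: the updated ReadYuvFile implementation is not shown in this patch.
#include <cstdint>
#include <fstream>
#include <memory>
#include "surface_buffer.h"

static void ReadYuvFile(OHOS::sptr<OHOS::SurfaceBuffer>& buffer, std::unique_ptr<std::ifstream>& yuvFile,
    int32_t width, int32_t height, int32_t format, int32_t stride)
{
    if (buffer == nullptr || yuvFile == nullptr || !yuvFile->is_open()) {
        return;
    }
    auto* dst = static_cast<char*>(buffer->GetVirAddr());
    // For semi-planar 4:2:0 input the file holds 'width' bytes per row for
    // 'height' luma rows plus height/2 interleaved chroma rows; copying row by
    // row lets the destination stride differ from the file's packed width.
    int32_t rows = height + height / 2;   // assumption: 4:2:0 semi-planar layout
    for (int32_t row = 0; row < rows; ++row) {
        yuvFile->read(dst + static_cast<size_t>(row) * stride, width);
    }
    (void)format;  // a full implementation would branch on format (RGBA, 4:4:4, ...)
}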
*nativeWindow2; + std::shared_ptr cb = std::make_shared(); + ret = ahe_->SetCallback(cb); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe_->GetSurface(&nativeWindow1); + ASSERT_NE(nativeWindow1, nullptr); + ahe2 = AihdrEnhancerVideo::Create(); + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->ReleaseOutputBuffer(100000, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_20, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr ahe2; + OHNativeWindow *nativeWindow2; + std::shared_ptr cb = std::make_shared(); + ret = ahe_->SetCallback(cb); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe_->GetSurface(&nativeWindow1); + ASSERT_NE(nativeWindow1, nullptr); + ahe2 = AihdrEnhancerVideo::Create(); + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = ahe_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_21, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + OHNativeWindowBuffer *ohNativeWindowBuffer; + std::shared_ptr cb = std::make_shared(); + ret = ahe_->SetCallback(cb); + ahe_->GetSurface(&nativeWindow1); + std::shared_ptr ahe2 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow2; + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + + int fenceFd = -1; + nativeWindow = nativeWindow1; + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_22, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr cscvImpl; + cscvImpl = make_shared(); + std::shared_ptr cb = std::make_shared(); + ret = ahe_->SetCallback(cb); + OHNativeWindow *nativeWindow2; + ahe_->GetSurface(&nativeWindow2); + ret = cscvImpl->Init(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = cscvImpl->OnProducerBufferReleased(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_23, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + OHNativeWindowBuffer *ohNativeWindowBuffer; + std::shared_ptr cb = std::make_shared(); + ret = 
ahe_->SetCallback(cb); + ahe_->GetSurface(&nativeWindow1); + std::shared_ptr ahe2 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow2; + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + + int fenceFd = -1; + nativeWindow = nativeWindow1; + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_FORMAT, GRAPHIC_PIXEL_FMT_YCBCR_420_SP); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowHandleOpt(nativeWindow, SET_BUFFER_GEOMETRY, DEFAULT_WIDTH, DEFAULT_HEIGHT); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = OH_NativeWindow_NativeWindowRequestBuffer(nativeWindow, &ohNativeWindowBuffer, &fenceFd); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = FlushSurf(ohNativeWindowBuffer); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->ReleaseOutputBuffer(0, true); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); + OH_NativeWindow_DestroyNativeWindow(nativeWindow); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_24, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + ret = ahe_->SetCallback(cb); + OHNativeWindow *nativeWindow1; + ahe_->GetSurface(&nativeWindow1); + std::shared_ptr ahe2; + ahe2 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow2; + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetSurface(nativeWindow1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->SetSurface(nativeWindow1); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->ReleaseOutputBuffer(0, 0); + ASSERT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_25, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + std::queue> AppInBufferAvilQue; + sptr buffer; + sptr buffer2; + int32_t fence = -1; + requestCfg_.format = GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + InitBufferConfig(); + OHNativeWindow *nativeWindow1; + ahe_->GetSurface(&nativeWindow1); + std::shared_ptr ahe2; + ahe2 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow2; + ahe2->GetSurface(&nativeWindow2); + ret = ahe_->SetCallback(cb); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = nativeWindow1->surface->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = nativeWindow1->surface->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = ahe_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe2->Release(); + ahe_->Release(); +} + +HWTEST_F(AihdrEnhancerVideoUnitTest, aihdrEnhancer_26, TestSize.Level2) +{ + int32_t ret = 0; + std::shared_ptr ahe_; + ahe_ = AihdrEnhancerVideo::Create(); + std::shared_ptr cb = std::make_shared(); + std::queue> AppInBufferAvilQue; + sptr 
buffer; + sptr buffer2; + int32_t fence = -1; + requestCfg_.format = GRAPHIC_PIXEL_FMT_YCBCR_420_SP; + InitBufferConfig(); + OHNativeWindow *nativeWindow1; + ahe_->GetSurface(&nativeWindow1); + std::shared_ptr ahe2; + ahe2 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow2; + ahe2->GetSurface(&nativeWindow2); + std::shared_ptr ahe3; + ahe3 = AihdrEnhancerVideo::Create(); + OHNativeWindow *nativeWindow3; + ahe3->GetSurface(&nativeWindow3); + ret = ahe_->SetCallback(cb); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->SetSurface(nativeWindow2); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Configure(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Prepare(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->Start(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + GSError err = nativeWindow1->surface->RequestBuffer(buffer, fence, requestCfg_); + ASSERT_EQ(err, GSERROR_OK); + AppInBufferAvilQue.push(buffer); + err = nativeWindow1->surface->FlushBuffer(buffer, -1, flushCfg_); + ASSERT_EQ(err, GSERROR_OK); + ret = ahe_->SetSurface(nativeWindow3); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ret = ahe_->NotifyEos(); + ASSERT_EQ(ret, VPE_ALGO_ERR_OK); + ahe_->Release(); + ahe2->Release(); + ahe3->Release(); } } // namespace VideoProcessingEngine } // namespace Media diff --git a/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp index a3f1e0d6dc48b0f3fa1c64bd386d6fee9d3b975f..8783d5075eb4d5f6d4e66d8cce3d00da674e1fb5 100644 --- a/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp +++ b/test/unittest/colorSpace_converter_video_ndk/colorSpace_converter_video_ndk_unit_test.cpp @@ -359,7 +359,7 @@ HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFunSuppor EXPECT_EQ(resultSupported, false); } -HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level1) +HWTEST_F(ColorSpaceConverterVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level0) { VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); diff --git a/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp b/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp index 50ab0386b3531672911f8a8279373677906ee6e7..0580f80bb52b6ff362a8a003bcbfeb1326a2eedc 100644 --- a/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp +++ b/test/unittest/colorspace_converter_video/colorspace_converter_video_unit_test.cpp @@ -61,7 +61,7 @@ public: void TearDown(void) {}; }; -HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_init_01, TestSize.Level1) +HWTEST_F(ColorSpaceConverterVideoUnitTest, cscv_init_01, TestSize.Level0) { auto handle = ColorSpaceConvertVideoCreate(); EXPECT_NE(nullptr, handle); diff --git a/test/unittest/contrast_enhancer/contrast_enhancer_unit_test.cpp b/test/unittest/contrast_enhancer/contrast_enhancer_unit_test.cpp index 2bbf849c873ce5236b086ecb83fa4575c62b904c..066a5e85cd3f340dda705077a343b0d5a2892fd1 100644 --- a/test/unittest/contrast_enhancer/contrast_enhancer_unit_test.cpp +++ b/test/unittest/contrast_enhancer/contrast_enhancer_unit_test.cpp @@ -1,3 +1,18 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + #include #include #include @@ -20,7 +35,10 @@ using namespace std; using namespace testing::ext; -using namespace OHOS::Media::VideoProcessingEngine; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { class ContrastEnhancerUnitTest : public ::testing::Test { protected: @@ -33,7 +51,7 @@ protected: } }; -sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height) +sptr CreateBuffer(uint32_t pixelFormat, int32_t width, int32_t height) { if (width <= 0 || height <= 0) { cout << "Invalid resolution" << endl; @@ -53,7 +71,7 @@ sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int inputCfg.format = pixelFormat; inputCfg.timeout = 0; if (GSERROR_OK != buffer->Alloc(inputCfg)) { - cout << "Alloc surface buffer{" + std::to_string(inputCfg.width) + std::to_string(inputCfg.strideAlignment) + + cout << "Alloc surface buffer{" + std::to_string(inputCfg.width) + std::to_string(inputCfg.strideAlignment) + "x" + std::to_string(inputCfg.height) + "format:" + std::to_string(inputCfg.format) + "} failed" << endl; return nullptr; } @@ -101,7 +119,7 @@ HWTEST_F(ContrastEnhancerUnitTest, GetParameter_NormalCase, TestSize.Level1) // 测试GetRegionHist函数 HWTEST_F(ContrastEnhancerUnitTest, GetRegionHist_NormalCase, TestSize.Level1) { - sptr input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1024, 768); + sptr input = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1024, 768); // 初始化input auto contrastEnhancer = ContrastEnhancerImage::Create(); EXPECT_NE(contrastEnhancer, nullptr); @@ -123,7 +141,7 @@ HWTEST_F(ContrastEnhancerUnitTest, GetRegionHist_NormalCase, TestSize.Level1) HWTEST_F(ContrastEnhancerUnitTest, GetRegionHist_InvalidFormat, TestSize.Level1) { - sptr input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 1024, 768); + sptr input = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 1024, 768); EXPECT_NE(input, nullptr); // 初始化input auto contrastEnhancer = ContrastEnhancerImage::Create(); @@ -153,7 +171,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_ImplDirectly, TestS .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -218,7 +236,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_InvalidFormat, Test .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 1024, 768); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888, 1024, 768); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -265,7 +283,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnPixel_NormalCase, TestSi .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1024, 768); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1024, 768); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ 
-343,7 +361,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_MaxRatio, TestSize. .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -378,7 +396,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_MinRatio, TestSize. .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -413,7 +431,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_DoubleClick, TestSi .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -448,7 +466,7 @@ HWTEST_F(ContrastEnhancerUnitTest, UpdateMetadataBasedOnHist_InvalidDuration, Te .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -489,7 +507,7 @@ HWTEST_F(ContrastEnhancerUnitTest, Concurrency_scenarios, TestSize.Level1) .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -534,7 +552,7 @@ HWTEST_F(ContrastEnhancerUnitTest, Concurrency_scenarios_with_animation, TestSiz .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -579,7 +597,7 @@ HWTEST_F(ContrastEnhancerUnitTest, Get_Algo_Twice, TestSize.Level1) .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -614,7 +632,7 @@ HWTEST_F(ContrastEnhancerUnitTest, Get_Algo_Failed, TestSize.Level1) .w = 1920, .h = 1080, }; - sptr surfaceBuffer = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); + sptr surfaceBuffer = CreateBuffer(OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102, 1920, 1080); EXPECT_NE(surfaceBuffer, nullptr); if (surfaceBuffer == nullptr) { return; @@ -644,4 +662,8 @@ HWTEST_F(ContrastEnhancerUnitTest, Get_Algo_Failed, TestSize.Level1) EXPECT_NE(ret, VPE_ALGO_ERR_OK); ret = contrastEnhancer->UpdateMetadataBasedOnHist(displayArea, surfaceBuffer, pixelmapInfo); EXPECT_NE(ret, VPE_ALGO_ERR_OK); -} \ No newline at end of file +} + +} // namespace VideoProcessingEngine +} // namespace Media +} // namespace OHOS \ No newline at end of file diff --git a/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp b/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp 
index 37ae5b004a164e12c79492939bd43202f63bbb2e..bdc1a2b0fbdec1bc8984264c962839b3c9590130 100644 --- a/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp +++ b/test/unittest/detail_enhancer/detail_enhancer_unit_test.cpp @@ -29,6 +29,7 @@ #include "detailEnh_sample.h" #include "detailEnh_sample_define.h" #include "detail_enhancer_image.h" +#include "detail_enhancer_image_fwk.h" using namespace std; using namespace testing::ext; @@ -1395,6 +1396,88 @@ HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_60, TestSize.Level1) EXPECT_NE(ret, VPE_ALGO_ERR_OK); } +// reset protection when there is no algo +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_61, TestSize.Level1) +{ + std::shared_ptr impl = std::make_shared(IMAGE); + EXPECT_NE(impl, nullptr); + if (impl == nullptr) { + printf("failed to init DetailEnhancerImage"); + return; + } + VPEAlgoErrCode err = impl->ResetProtectionStatus(); + EXPECT_EQ(err, VPE_ALGO_ERR_OK); +} + +// reset protection when there is algo +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_62, TestSize.Level1) +{ + std::shared_ptr detailEnh = std::make_shared(IMAGE); + EXPECT_NE(detailEnh, nullptr); + if (detailEnh == nullptr) { + printf("failed to init DetailEnhancerImage"); + return; + } + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + VPEAlgoErrCode err = detailEnh->ResetProtectionStatus(); + EXPECT_EQ(err, VPE_ALGO_ERR_OK); +} + +// enable protection when there is no algo +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_63, TestSize.Level1) +{ + std::shared_ptr impl = std::make_shared(IMAGE); + EXPECT_NE(impl, nullptr); + if (impl == nullptr) { + printf("failed to init DetailEnhancerImage"); + return; + } + VPEAlgoErrCode err = impl->EnableProtection(true); + EXPECT_EQ(err, VPE_ALGO_ERR_OK); +} + +// enable protection when there is algo +HWTEST_F(DetailEnhancerUnitTest, detailenhancer_process_64, TestSize.Level1) +{ + std::shared_ptr detailEnh = std::make_shared(IMAGE); + EXPECT_NE(detailEnh, nullptr); + if (detailEnh == nullptr) { + printf("failed to init DetailEnhancerImage"); + return; + } + DetailEnhancerParameters param { + .uri = "", + .level = DETAIL_ENH_LEVEL_HIGH, + }; + detailEnh->SetParameter(param); + int32_t inputWidth = 1024; + int32_t inputHeight = 768; + int32_t outputWidth = 2048; + int32_t outputHeight = 1536; + int32_t inputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + int32_t outputFormat = OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP; + auto input = CreateSurfaceBuffer(inputFormat, inputWidth, inputHeight); + auto output = CreateSurfaceBuffer(outputFormat, outputWidth, outputHeight); + int32_t ret = detailEnh->Process(input, output); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); + VPEAlgoErrCode err = detailEnh->EnableProtection(true); + EXPECT_EQ(err, VPE_ALGO_ERR_OK); +} + // check extension extream vision engine, process YCRCB_420_SP, aisr MOVED TO VPE_EXT HWTEST_F(DetailEnhancerUnitTest, extream_vision_engine_process_01, TestSize.Level1) { diff --git 
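Note: the new detailenhancer_process_61 through _64 cases exercise EnableProtection and ResetProtectionStatus both before and after the underlying algorithm exists. Condensed, the call pattern they establish looks like the sketch below; the concrete type is taken to be DetailEnhancerImageFwk, matching the header added above, and error handling is omitted.

// Condensed from detailenhancer_process_61..64; sizes and formats mirror the tests.
void DetailEnhancerProtectionExample()
{
    auto detailEnh = std::make_shared<DetailEnhancerImageFwk>(IMAGE);
    DetailEnhancerParameters param {
        .uri = "",
        .level = DETAIL_ENH_LEVEL_HIGH,
    };
    detailEnh->SetParameter(param);

    auto input = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP, 1024, 768);
    auto output = CreateSurfaceBuffer(OHOS::GRAPHIC_PIXEL_FMT_YCRCB_420_SP, 2048, 1536);
    detailEnh->Process(input, output);          // instantiates the underlying algorithm

    // Both calls are expected to return VPE_ALGO_ERR_OK, with or without a prior Process().
    detailEnh->EnableProtection(true);
    detailEnh->ResetProtectionStatus();
}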
a/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp b/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp index 2c332f89f4b170fd37619750e1f4a24be8dbd693..356cb4aeeef92f3116b6d5a87f973561c0c6562d 100644 --- a/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp +++ b/test/unittest/detail_enhancer_video/detail_enhancer_video_unit_test.cpp @@ -25,6 +25,7 @@ #include "algorithm_common.h" #include "algorithm_errors.h" +#include "algorithm_video_impl.h" #include "detail_enhancer_video_impl.h" #include "detail_enhancer_video.h" @@ -472,6 +473,22 @@ HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_23, TestSize.Level1) detailEnhVideo->Release(); } +// create video fwk +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_24, TestSize.Level1) +{ + std::shared_ptr videoImpl = DetailEnhancerVideoFwk::Create(); + EXPECT_NE(videoImpl, nullptr); +} + +// video impl get parameter +HWTEST_F(DetailEnhancerVideoUnitTest, detailenhancer_25, TestSize.Level1) +{ + std::shared_ptr videoImpl = DetailEnhancerVideoFwk::Create(); + EXPECT_NE(videoImpl, nullptr); + Format parameter; + VPEAlgoErrCode ret = videoImpl->GetParameter(parameter); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} } // namespace VideoProcessingEngine } // namespace Media } // namespace OHOS diff --git a/test/unittest/image_processing/BUILD.gn b/test/unittest/image_processing/BUILD.gn index 947fe98f617cf300cc87745ca708c5dea0a5cc91..b83b72bd3ce0ef36efccc1f96b0866f425638eb9 100644 --- a/test/unittest/image_processing/BUILD.gn +++ b/test/unittest/image_processing/BUILD.gn @@ -36,6 +36,10 @@ ohos_unittest("image_processing_unit_test") { "$FRAMEWORK_DIR/capi/image_processing/detail_enhancer/include", "$INTERFACES_DIR/kits/c", "$FRAMEWORK_DIR/capi/image_processing/include", + "$FRAMEWORK_DIR/capi/image_processing/colorspace_converter/include", + "$FRAMEWORK_DIR/capi/image_processing/metadata_generator/include", + "$DFX_DIR/include", + "$SERVICES_DIR/include", ] sources = [ "image_processing_unit_test.cpp" ] diff --git a/test/unittest/image_processing/image_processing_unit_test.cpp b/test/unittest/image_processing/image_processing_unit_test.cpp index 0691a0333d07552ac38c3bc0f367af642a5490b5..d511a6327a8574918dc36460c41a439c5ee7834c 100644 --- a/test/unittest/image_processing/image_processing_unit_test.cpp +++ b/test/unittest/image_processing/image_processing_unit_test.cpp @@ -20,6 +20,7 @@ #include "native_avformat.h" #include "pixelmap_native.h" #include "image_processing_factory.h" +#include "image_processing_capi_capability.h" using namespace std; using namespace testing::ext; @@ -66,7 +67,21 @@ void CreateEmptyPixelmap(OH_PixelmapNative** pixelMap, int32_t width, int32_t he (void)OH_PixelmapNative_CreateEmptyPixelmap(options, pixelMap); } -HWTEST_F(ImageProcessingUnitTest, create_instance_01, TestSize.Level1) +HWTEST_F(ImageProcessingUnitTest, deinitialize_01, TestSize.Level1) +{ + auto ret = OH_ImageProcessing_DeinitializeEnvironment(); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, create_instance_01, TestSize.Level0) +{ + ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); + OH_ImageProcessing* instance = nullptr; + ret = OH_ImageProcessing_Create(&instance, IMAGE_PROCESSING_TYPE_DETAIL_ENHANCER); + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, create_instance_02, TestSize.Level1) { ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); OH_ImageProcessing* instance = 
nullptr; @@ -80,7 +95,13 @@ HWTEST_F(ImageProcessingUnitTest, deinitialize_02, TestSize.Level1) EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); } -HWTEST_F(ImageProcessingUnitTest, create_instance_02, TestSize.Level1) +HWTEST_F(ImageProcessingUnitTest, deinitialize_03, TestSize.Level1) +{ + auto ret = OH_ImageProcessing_DeinitializeEnvironment(); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, create_instance_03, TestSize.Level1) { ImageProcessing_ErrorCode ret = OH_ImageProcessing_InitializeEnvironment(); OH_ImageProcessing* instance = nullptr; @@ -316,6 +337,7 @@ HWTEST_F(ImageProcessingUnitTest, process_07, TestSize.Level1) EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); OH_ImageProcessing_DeinitializeEnvironment(); } + HWTEST_F(ImageProcessingUnitTest, process_08, TestSize.Level1) { OH_ImageProcessing_InitializeEnvironment(); @@ -334,6 +356,7 @@ HWTEST_F(ImageProcessingUnitTest, process_08, TestSize.Level1) EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); OH_ImageProcessing_DeinitializeEnvironment(); } + HWTEST_F(ImageProcessingUnitTest, process_09, TestSize.Level1) { OH_ImageProcessing_InitializeEnvironment(); @@ -350,6 +373,7 @@ HWTEST_F(ImageProcessingUnitTest, process_09, TestSize.Level1) EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); OH_ImageProcessing_DeinitializeEnvironment(); } + HWTEST_F(ImageProcessingUnitTest, process_10, TestSize.Level1) { OH_ImageProcessing_InitializeEnvironment(); @@ -364,6 +388,38 @@ HWTEST_F(ImageProcessingUnitTest, process_10, TestSize.Level1) EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); OH_ImageProcessing_DeinitializeEnvironment(); } + +HWTEST_F(ImageProcessingUnitTest, process_11, TestSize.Level1) +{ + ImageProcessingCapiCapability instance; + instance.LoadLibrary(); + instance.LoadLibrary(); + int ret = IMAGE_PROCESSING_SUCCESS; + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, process_12, TestSize.Level1) +{ + ImageProcessingCapiCapability instance; + instance.UnloadLibrary(); + int ret = IMAGE_PROCESSING_SUCCESS; + EXPECT_EQ(ret, IMAGE_PROCESSING_SUCCESS); +} + +HWTEST_F(ImageProcessingUnitTest, create_image_processing_01, TestSize.Level1) +{ + OH_ImageProcessing* instance = nullptr; + ImageProcessing_ErrorCode ret = OH_ImageProcessing_Create(&instance, 100); + ImageProcessingCapiCapability imageProcessingCapiCapability; + imageProcessingCapiCapability.UnloadLibrary(); + imageProcessingCapiCapability.LoadLibrary(); + imageProcessingCapiCapability.UnloadLibrary(); + imageProcessingCapiCapability.LoadLibrary(); + imageProcessingCapiCapability.LoadLibrary(); + imageProcessingCapiCapability.UnloadLibrary(); + imageProcessingCapiCapability.UnloadLibrary(); + EXPECT_NE(ret, IMAGE_PROCESSING_SUCCESS); +} } // namespace VideoProcessingEngine } // namespace Media } // namespace OHOS diff --git a/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp b/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp index 1beb9f5eb35f48dc969ec6d590c41b99a118f3bd..0efe829e15ec38d464c674e23f10bb850ae257c4 100644 --- a/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp +++ b/test/unittest/metadata_gen_video_ndk/metadata_gen_video_ndk_unit_test.cpp @@ -204,7 +204,7 @@ HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_testFunSupporte EXPECT_EQ(resultSupported, false); } -HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level1) +HWTEST_F(MetadataGeneratorVideoNdkImplUnitTest, testVideoProcess_testFun, TestSize.Level0) { 
VideoProcessing_ErrorCode ret = OH_VideoProcessing_InitializeEnvironment(); EXPECT_EQ(ret, VIDEO_PROCESSING_SUCCESS); diff --git a/test/unittest/service/BUILD.gn b/test/unittest/service/BUILD.gn index d431e71f6643a026bf8c4f351fc19f143b666b09..13c25242e199541eeeff06606210d628c3898748 100644 --- a/test/unittest/service/BUILD.gn +++ b/test/unittest/service/BUILD.gn @@ -40,9 +40,10 @@ ohos_unittest("services_test") { "$SERVICES_DIR/utils/include/", ] - sources = [ "video_processing_client_test.cpp", - "video_processing_load_callback_test.cpp", - "video_processing_server_test.cpp", + sources = [ "video_processing_client_test.cpp", + "video_processing_load_callback_test.cpp", + "video_processing_server_test.cpp", + "video_processing_algorithm_base_test.cpp", "video_processing_algorithm_factory_test.cpp", "video_processing_algorithm_without_data_test.cpp", "configuration_helper_test.cpp", @@ -59,6 +60,7 @@ ohos_unittest("services_test") { "video_processing_engine:videoprocessingservice", "c_utils:utils", "eventhandler:libeventhandler", + "ffrt:libffrt", "graphic_2d:2d_graphics", "graphic_surface:surface", "hilog:libhilog", @@ -69,6 +71,7 @@ ohos_unittest("services_test") { "media_foundation:media_foundation", "safwk:system_ability_fwk", "samgr:samgr_proxy", + "libxml2:libxml2", ] subsystem_name = "multimedia" diff --git a/test/unittest/service/configuration_helper_test.cpp b/test/unittest/service/configuration_helper_test.cpp index 96963ccf7070b135cc24eb4caa845bff1a39b2b6..bc9ebcf7de86c18ec71328802c3c36a1a96d2c98 100644 --- a/test/unittest/service/configuration_helper_test.cpp +++ b/test/unittest/service/configuration_helper_test.cpp @@ -23,6 +23,8 @@ #include #include #include +#include +#include #include "vpe_log.h" @@ -72,14 +74,14 @@ void ConfigurationHelperTest::TearDown(void) * @tc.number: ConfigurationHelperTest_001 * @tc.desc : Test LoadConfigurationFromXml function when xml file does not exist. */ -TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileNotExist) +HWTEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileNotExist, TestSize.Level0) { ConfigurationHelper helper; std::string xmlFilePath = "non_existent_file.xml"; EXPECT_FALSE(helper.LoadConfigurationFromXml(xmlFilePath)); } -TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileIsInvalid) +HWTEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileIsInvalid, TestSize.Level0) { ConfigurationHelper helper; std::string xmlFilePath = "invalid_file.xml"; @@ -94,7 +96,7 @@ TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenX * @tc.number: 003 * @tc.desc : Test LoadConfigurationFromXml function when xml file has no root element. */ -TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileHasNoRootElement) +HWTEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenXmlFileHasNoRootElement, TestSize.Level0) { ConfigurationHelper helper; std::string xmlFilePath = "no_root_element.xml"; @@ -110,7 +112,7 @@ TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnFalse_WhenX * @tc.number: 004 * @tc.desc : Test LoadConfigurationFromXml function when xml file is valid. 
*/ -TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnTrue_WhenXmlFileIsValid) +HWTEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnTrue_WhenXmlFileIsValid, TestSize.Level0) { ConfigurationHelper helper; std::string xmlFilePath = "valid_file.xml"; @@ -120,6 +122,129 @@ TEST_F(ConfigurationHelperTest, LoadConfigurationFromXml_ShouldReturnTrue_WhenXm file.close(); EXPECT_TRUE(helper.LoadConfigurationFromXml(xmlFilePath)); } + +HWTEST_F(ConfigurationHelperTest, GetElementByName_Success_WhenInputIsValid, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlDocPtr doc = xmlNewDoc(reinterpret_cast("1.0")); + xmlNodePtr root_node = xmlNewNode(nullptr, reinterpret_cast("root")); + xmlDocSetRootElement(doc, root_node); + xmlNodePtr child_node = xmlNewNode(nullptr, reinterpret_cast("child")); + xmlNewProp(child_node, reinterpret_cast("name"), reinterpret_cast("test")); + xmlAddChild(root_node, child_node); + const xmlNode* root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + const xmlNode* result = helper.GetElementByName(*root, "child", "test"); + + xmlFreeDoc(doc); + xmlCleanupParser(); + EXPECT_TRUE(result != nullptr); +} + +HWTEST_F(ConfigurationHelperTest, GetElementByName_Fail_WhenInputIsInvalid, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlDocPtr doc = xmlNewDoc(reinterpret_cast("1.0")); + xmlNodePtr root_node = xmlNewNode(nullptr, reinterpret_cast("root")); + xmlDocSetRootElement(doc, root_node); + xmlNodePtr child_node = xmlNewNode(nullptr, reinterpret_cast("child")); + xmlNewProp(child_node, reinterpret_cast("name"), reinterpret_cast("test")); + xmlAddChild(root_node, child_node); + const xmlNode* root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + const xmlNode* result = helper.GetElementByName(*root, "child", "test1"); + + xmlFreeDoc(doc); + xmlCleanupParser(); + EXPECT_TRUE(result == nullptr); +} + +HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenInputIsValid_01, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlChar *xmlData = (xmlChar*) "12345"; + xmlDocPtr doc = xmlReadMemory((char *)xmlData, strlen((char *)xmlData), "test.xml", NULL, 0); + xmlNodePtr root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + int value = 0; + bool result = helper.GetElementValue(*root, "element", value); + EXPECT_TRUE(result); + EXPECT_TRUE(value == 12345); +} + +HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenInputIsValid_02, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlChar *xmlData = (xmlChar*) "12345"; + xmlDocPtr doc = xmlReadMemory((char *)xmlData, strlen((char *)xmlData), "test.xml", NULL, 0); + xmlNodePtr root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + uint32_t value = 0; + bool result = helper.GetElementValue(*root, "element", value); + EXPECT_TRUE(result); + EXPECT_TRUE(value == 12345); +} + +HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenInputIsValid_03, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlChar *xmlData = (xmlChar*) "12345"; + xmlDocPtr doc = xmlReadMemory((char *)xmlData, strlen((char *)xmlData), "test.xml", NULL, 0); + xmlNodePtr root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + 
uint64_t value = 0; + bool result = helper.GetElementValue(*root, "element", value); + EXPECT_TRUE(result); + EXPECT_TRUE(value == 12345); +} + +HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenInputIsValid_04, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlChar *xmlData = (xmlChar*) "true"; + xmlDocPtr doc = xmlReadMemory((char *)xmlData, strlen((char *)xmlData), "test.xml", NULL, 0); + xmlNodePtr root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + bool value = 0; + bool result = helper.GetElementValue(*root, "element", value); + EXPECT_TRUE(result); + EXPECT_TRUE(value); +} + +HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenInputIsValid_05, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlChar *xmlData = (xmlChar*) "12345"; + xmlDocPtr doc = xmlReadMemory((char *)xmlData, strlen((char *)xmlData), "test.xml", NULL, 0); + xmlNodePtr root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + float value = 0; + bool result = helper.GetElementValue(*root, "element", value); + EXPECT_TRUE(result); + EXPECT_TRUE(value == 12345); +} + /** * @tc.name : ParseXml_ShouldReturnTrue_WhenValidXmlNode * @tc.number: ConfigurationHelperTest_001 @@ -224,6 +349,42 @@ HWTEST_F(ConfigurationHelperTest, GetElementName_ShouldReturnEmpty_WhenNameIsNul EXPECT_EQ(result, ""); } + +HWTEST_F(ConfigurationHelperTest, GetElementName_ShouldReturnEmpty_WhenNameIsNull_2, TestSize.Level0) +{ + // Arrange + xmlNode myself; + myself.name = nullptr; + ConfigurationHelper helper; + const std::string tag = "test"; + // Act + std::string result = helper.GetElementName(myself, tag); + + // Assert + EXPECT_EQ(result, ""); +} + +HWTEST_F(ConfigurationHelperTest, GetElementName_Success_WhenInputIsValid, TestSize.Level0) +{ + // Initialize libxml2 + xmlInitParser(); + // Create new xml doc and node structure + xmlDocPtr doc = xmlNewDoc(reinterpret_cast("1.0")); + xmlNodePtr root_node = xmlNewNode(nullptr, reinterpret_cast("root")); + xmlDocSetRootElement(doc, root_node); + xmlNodePtr child_node = xmlNewNode(nullptr, reinterpret_cast("child")); + xmlNewProp(child_node, reinterpret_cast("name"), reinterpret_cast("100")); + xmlAddChild(root_node, child_node); + const xmlNode* root = xmlDocGetRootElement(doc); + + ConfigurationHelper helper; + const std::string result = helper.GetElementName(*root, "child"); + + xmlFreeDoc(doc); + xmlCleanupParser(); + EXPECT_TRUE(result == "100"); +} + HWTEST_F(ConfigurationHelperTest, GetElementText_ShouldReturnCorrectText_WhenParentAndTagAreValid, TestSize.Level0) { // Arrange @@ -321,7 +482,6 @@ HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnFalse_WhenTextIsEm EXPECT_FALSE(helper.GetElementValue(parent, tag, value)); } - HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenValueIsNotEmpty, TestSize.Level0) { // Arrange @@ -337,6 +497,7 @@ HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenValueIsNo EXPECT_FALSE(result); EXPECT_NE(value, "testValue"); } + HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnFalse_WhenValueIsEmpty, TestSize.Level0) { // Arrange @@ -352,6 +513,7 @@ HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnFalse_WhenValueIsE EXPECT_FALSE(result); EXPECT_EQ(value, ""); } + HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnTrue_WhenValueIsNotEmpty01, TestSize.Level0) { // Arrange @@ -388,7 +550,7 @@ 
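The GetElementValue_* cases above build a small XML document in memory with xmlReadMemory and then read a typed value out of a named child element of the root. A minimal, self-contained sketch of that pattern using plain libxml2 follows; the <root><element>12345</element></root> fixture shape and the helper-free lookup are assumptions for illustration only, not code from this repository, and ConfigurationHelper is not used.

// Hypothetical standalone sketch of the in-memory XML fixture pattern used by the tests above.
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <libxml/parser.h>
#include <libxml/tree.h>

int main()
{
    xmlInitParser();
    const char* xmlData = "<root><element>12345</element></root>"; // assumed fixture shape
    xmlDocPtr doc = xmlReadMemory(xmlData, static_cast<int>(strlen(xmlData)), "test.xml", nullptr, 0);
    if (doc == nullptr) {
        return 1;
    }
    long value = 0;
    const xmlNode* root = xmlDocGetRootElement(doc);
    // Walk the root's children and pull the text of the first <element> node.
    for (const xmlNode* node = root->children; node != nullptr; node = node->next) {
        if (node->type == XML_ELEMENT_NODE &&
            xmlStrcmp(node->name, reinterpret_cast<const xmlChar*>("element")) == 0) {
            xmlChar* text = xmlNodeGetContent(node); // caller owns the string and must free it
            value = strtol(reinterpret_cast<const char*>(text), nullptr, 10);
            xmlFree(text);
            break;
        }
    }
    printf("element = %ld\n", value); // expected: 12345
    xmlFreeDoc(doc);
    xmlCleanupParser();
    return value == 12345 ? 0 : 1;
}

The in-test variant wraps the same lookup behind ConfigurationHelper::GetElementValue, which the expectations above exercise for int, uint32_t, uint64_t, bool and float conversions.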
HWTEST_F(ConfigurationHelperTest, GetElementValue_ShouldReturnFalse_WhenValueIsE * @tc.number: 001 * @tc.desc : Test scenario where the name is null and the function should return null. */ -TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenNameIsNull) +HWTEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenNameIsNull, TestSize.Level0) { ConfigurationHelper helper; xmlNode parent; @@ -405,7 +567,7 @@ TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenNameIsNull * @tc.number: 002 * @tc.desc : Test scenario where the name is not null and the function should return a node. */ -TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNode_WhenNameIsNotNull) +HWTEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNode_WhenNameIsNotNull, TestSize.Level0) { ConfigurationHelper helper; xmlNode parent; @@ -422,36 +584,33 @@ TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNode_WhenNameIsNotN * @tc.number: 003 * @tc.desc : Test scenario where the tag is empty and the function should return null. */ -TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenTagIsEmpty) +HWTEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenTagIsEmpty, TestSize.Level0) { - ConfigurationHelper helper; - xmlNode parent; - std::string tag = ""; - std::string name = "testName"; + // Initialize libxml2 + xmlInitParser(); - const xmlNode* result = helper.GetElementByName(parent, tag, name); + // Create a new document + xmlDocPtr doc = xmlNewDoc(BAD_CAST "1.0"); - EXPECT_EQ(result, nullptr); -} + // Create the root element + xmlNodePtr root_node = xmlNewNode(NULL, BAD_CAST "root"); + xmlDocSetRootElement(doc, root_node); -/** - * @tc.name : GetElementByName_ShouldReturnNull_WhenTagIsNull - * @tc.number: 004 - * @tc.desc : Test scenario where the tag is null and the function should return null. - */ -TEST_F(ConfigurationHelperTest, GetElementByName_ShouldReturnNull_WhenTagIsNull) -{ + // Create a child element without a name attribute + xmlNodePtr child_node = xmlNewChild(root_node, NULL, BAD_CAST "child", NULL); + + // Create a ConfigurationHelper instance ConfigurationHelper helper; - xmlNode parent; - std::string tag = ""; - std::string name = "testName"; - const xmlNode* result = helper.GetElementByName(parent, tag, name); + // Call GetElementByName + const xmlNode* result = helper.GetElementByName(*child_node, "child", "someName"); EXPECT_EQ(result, nullptr); + // Clean up resources + xmlFreeDoc(doc); + xmlCleanupParser(); } - } } } diff --git a/test/unittest/service/video_processing_algorithm_base_test.cpp b/test/unittest/service/video_processing_algorithm_base_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dcb911f15e1962d660c36e5db8d67f932b0f821b --- /dev/null +++ b/test/unittest/service/video_processing_algorithm_base_test.cpp @@ -0,0 +1,562 @@ +/* + * Copyright (c) 2025 Huawei Device Co., Ltd. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +#define private public +#define protected public + +#include "gtest/gtest.h" + +#include "video_processing_algorithm_base.h" + +#include + +#include "algorithm_errors.h" +#include "vpe_log.h" + +// NOTE: Add header file of static algorithm which would be called by VPE SA below: +// algorithm begin +// algorithm end +using namespace std; +using namespace testing::ext; + +using namespace OHOS; +using namespace OHOS::Media::VideoProcessingEngine; + +namespace OHOS { +namespace Media { +namespace VideoProcessingEngine { + +class VideoProcessingAlgorithmTestImpl : public VideoProcessingAlgorithmBase { +public: + int OnInitializeLocked() override; + int OnDeinitializeLocked() override; + void SetTestFlag(bool flag); + +protected: + VideoProcessingAlgorithmTestImpl(const std::string& feature, uint32_t id) : + VideoProcessingAlgorithmBase(feature, id) {} + virtual ~VideoProcessingAlgorithmTestImpl() = default; + VideoProcessingAlgorithmTestImpl(const VideoProcessingAlgorithmTestImpl&) = delete; + VideoProcessingAlgorithmTestImpl& operator=(const VideoProcessingAlgorithmTestImpl&) = delete; + VideoProcessingAlgorithmTestImpl(VideoProcessingAlgorithmTestImpl&&) = delete; + VideoProcessingAlgorithmTestImpl& operator=(VideoProcessingAlgorithmTestImpl&&) = delete; + + bool testFlag{true}; +}; + +int VideoProcessingAlgorithmTestImpl::OnInitializeLocked() +{ + if (!testFlag) { + return VPE_ALGO_ERR_INVALID_PARAM; + } + return VPE_ALGO_ERR_OK; +} + +int VideoProcessingAlgorithmTestImpl::OnDeinitializeLocked() +{ + if (!testFlag) { + return VPE_ALGO_ERR_INVALID_PARAM; + } + return VPE_ALGO_ERR_OK; +} + +void VideoProcessingAlgorithmTestImpl::SetTestFlag(bool flag) +{ + testFlag = flag; +} + +class VideoProcessingAlgorithmBaseTest : public testing::Test { +public: + static void SetUpTestCase(void); + static void TearDownTestCase(void); + void SetUp(); + void TearDown(); +}; + +void VideoProcessingAlgorithmBaseTest::SetUpTestCase(void) +{ + cout << "[SetUpTestCase]: " << endl; +} + +void VideoProcessingAlgorithmBaseTest::TearDownTestCase(void) +{ + cout << "[TearDownTestCase]: " << endl; +} + +void VideoProcessingAlgorithmBaseTest::SetUp(void) +{ + cout << "[SetUp]: SetUp!!!" << endl; +} + +void VideoProcessingAlgorithmBaseTest::TearDown(void) +{ + cout << "[TearDown]: over!!!" << endl; +} + +/** + * @tc.name : Initialize_Success + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Initialize_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Initialize_Success, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + int ret = algo.Initialize(); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : Initialize_Fail + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Initialize_002 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Initialize_Fail, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + algo.SetTestFlag(false); + int ret = algo.Initialize(); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : Deinitialize_Success + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Deinitialize_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Deinitialize_Success, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + algo.isInitialized_ = true; + int ret = algo.Deinitialize(); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : Deinitialize_Fail + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Deinitialize_002 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Deinitialize_Fail, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + algo.isInitialized_ = true; + algo.SetTestFlag(false); + int ret = algo.Deinitialize(); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : Add_Success_WhenClientIdIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Add_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Add_Success_WhenClientIdIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + std::string clientName = "client0"; + uint32_t clientID = 0; + int ret = algo.Add(clientName, clientID); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : Del_Success_WhenClientIdIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Del_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the client id is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Del_Success_WhenClientIdIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + int ret = algo.Del(clientID); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : UpdateMetadata_Fail_WhenClientIdIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_UpdateMetadata_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when when the input is Invalid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, UpdateMetadata_Fail_WhenClientIdIsInvalid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + int ret = algo.UpdateMetadata(clientID, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : Process_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Process_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Process_Success_WhenInputIsValid_001, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + int ret = algo.Process(clientID, buffer, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : Process_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Process_002 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is invalid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Process_Success_WhenInputIsInvalid_002, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.Process(clientID, buffer, bufferInvalid); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : Process_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Process_003 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is invalid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Process_Success_WhenInputIsInvalid_003, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.Process(clientID, bufferInvalid, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : Process_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_Process_004 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is invalid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, Process_Success_WhenInputIsInvalid_004, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo bufferInvalid; + int ret = algo.Process(clientID, bufferInvalid, bufferInvalid); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_001, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, bufferInvalid, buffer, buffer, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_002 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_002, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, buffer, bufferInvalid, buffer, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_003 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_003, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, buffer, buffer, bufferInvalid, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_004 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_004, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, bufferInvalid, bufferInvalid, buffer, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_005 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_005, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, buffer, bufferInvalid, bufferInvalid, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_006 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_006, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, bufferInvalid, buffer, bufferInvalid, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : ComposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ComposeImage_007 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ComposeImage_Fail_WhenInputIsInvalid_007, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo bufferInvalid; + int ret = algo.ComposeImage(clientID, bufferInvalid, bufferInvalid, bufferInvalid, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : DecomposeImage_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DecomposeImage_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DecomposeImage_Success_WhenInputIsValid_001, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + int ret = algo.DecomposeImage(clientID, buffer, buffer, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : DecomposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DecomposeImage_002 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DecomposeImage_Fail_WhenInputIsInvalid_002, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.DecomposeImage(clientID, bufferInvalid, buffer, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : DecomposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DecomposeImage_003 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DecomposeImage_Fail_WhenInputIsInvalid_003, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.DecomposeImage(clientID, buffer, bufferInvalid, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : DecomposeImage_Fail_WhenInputIsInvalid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DecomposeImage_004 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DecomposeImage_Fail_WhenInputIsInvalid_004, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + buffer.surfacebuffer = SurfaceBuffer::Create(); + SurfaceBufferInfo bufferInvalid; + int ret = algo.DecomposeImage(clientID, buffer, buffer, bufferInvalid); + EXPECT_EQ(ret, VPE_ALGO_ERR_INVALID_PARAM); +} + +/** + * @tc.name : SetParameter_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_SetParameter_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, SetParameter_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + int tag = 0; + std::vector parameter; + int ret = algo.SetParameter(clientID, tag, parameter); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : GetParameter_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_GetParameter_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, GetParameter_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + int tag = 0; + std::vector parameter; + int ret = algo.GetParameter(clientID, tag, parameter); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : AddClientIDLocked_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_AddClientIDLocked_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, AddClientIDLocked_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + std::string clientName = "client0"; + uint32_t clientID = 0; + int ret = algo.AddClientIDLocked(clientName, clientID); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : DelClientIDLocked_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DelClientIDLocked_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DelClientIDLocked_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + std::string clientName = "client0"; + uint32_t clientID = 0; + bool isEmpty = true; + int ret = algo.DelClientIDLocked(clientID, isEmpty); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : ClearClientsLocked_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ClearClientsLocked_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, ClearClientsLocked_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + algo.ClearClientsLocked(); +} + +/** + * @tc.name : DoUpdateMetadata_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_ClearClientsLocked_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DoUpdateMetadata_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + int ret = algo.DoUpdateMetadata(clientID, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : DoProcess_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DoProcess_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DoProcess_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + int ret = algo.DoProcess(clientID, buffer, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : DoComposeImage_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DoComposeImage_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. 
+ */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DoComposeImage_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + int ret = algo.DoComposeImage(clientID, buffer, buffer, buffer, true); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +/** + * @tc.name : DoDecomposeImage_Success_WhenInputIsValid + * @tc.number: VideoProcessingEngine_VideoProcessingAlgorithmBase_DoDecomposeImage_001 + * @tc.desc : Test VideoProcessingAlgorithmBase method when the input is valid. + */ +HWTEST_F(VideoProcessingAlgorithmBaseTest, DoDecomposeImage_Success_WhenInputIsValid, TestSize.Level0) +{ + OHOS::Media::VideoProcessingEngine::VideoProcessingAlgorithmTestImpl algo("test", 0); + uint32_t clientID = 0; + SurfaceBufferInfo buffer; + int ret = algo.DoDecomposeImage(clientID, buffer, buffer, buffer); + EXPECT_EQ(ret, VPE_ALGO_ERR_OK); +} + +} +} +} \ No newline at end of file diff --git a/test/unittest/service/video_processing_algorithm_factory_test.cpp b/test/unittest/service/video_processing_algorithm_factory_test.cpp index 8e975e32bcb947595f80c190de3e888074780758..e445330d43b08c24f3de65600618a20b1b5a0607 100644 --- a/test/unittest/service/video_processing_algorithm_factory_test.cpp +++ b/test/unittest/service/video_processing_algorithm_factory_test.cpp @@ -165,8 +165,8 @@ HWTEST_F(VideoProcessingAlgorithmFactoryTest, Create_ShouldReturnValidAlgorithm_ { VideoProcessingAlgorithmFactory factory; // Assuming "valid_feature" is a valid feature in g_creators - auto result = factory.Create("valid_feature"); - EXPECT_EQ(result, nullptr); + auto result = factory.Create("AisrImage"); + EXPECT_NE(result, nullptr); } /** diff --git a/test/unittest/service/video_processing_server_test.cpp b/test/unittest/service/video_processing_server_test.cpp index df55fa824c781b1c4924a55ed410ed066e975adb..292c0d9bb8b2beb363ecb8518b9bd9bd057f0208 100644 --- a/test/unittest/service/video_processing_server_test.cpp +++ b/test/unittest/service/video_processing_server_test.cpp @@ -143,6 +143,21 @@ HWTEST_F(VideoProcessingServerTest, LoadInfo_Test_05, TestSize.Level0) VPE_LOGI("LoadInfo_Test_05: end!"); } +/** + * @tc.name : LoadInfo_Test_06 + * @tc.number: VideoProcessingServerTest_06 + * @tc.desc : Test LoadInfo function when the key is invalid. 
+ */ +HWTEST_F(VideoProcessingServerTest, LoadInfo_Test_06, TestSize.Level0) +{ + VPE_LOGI("LoadInfo_Test_06: begin!"); + VideoProcessingServer videoProcessingServer(1, true); + SurfaceBufferInfo bufferInfo; + ErrCode result = videoProcessingServer.LoadInfo(100, bufferInfo); + EXPECT_EQ(result, ERR_INVALID_DATA); + VPE_LOGI("LoadInfo_Test_06: end!"); +} + /** * @tc.name : Create_Success_WhenValidFeatureAndClientName @@ -153,60 +168,30 @@ HWTEST_F(VideoProcessingServerTest, Create_Success_WhenValidFeatureAndClientName { VPE_LOGI("Create_Success_WhenValidFeatureAndClientName: begin!"); VideoProcessingServer server(1, true); - std::string feature = "testFeature"; + std::string feature = "AisrImage"; std::string clientName = "testClient"; - int32_t clientID = 0; - EXPECT_NE(server.Create(feature, clientName, clientID), VPE_ALGO_ERR_OK); + int32_t clientID = 1; + server.Create(feature, clientName, clientID); + EXPECT_EQ(server.Create(feature, clientName, clientID), VPE_ALGO_ERR_OK); VPE_LOGI("Create_Success_WhenValidFeatureAndClientName: end!"); } /** - * @tc.name : Create_Fail_WhenNoMemory - * @tc.number: VideoProcessingServerTest_002 - * @tc.desc : Test Create method when no memory is available. + * @tc.name : Create_Fail_WhenInvalidFeatureAndClientName + * @tc.number: VideoProcessingServerTest_002 + * @tc.desc : Test Create method when invalid feature and clientName are provided. */ -HWTEST_F(VideoProcessingServerTest, Create_Fail_WhenNoMemory, TestSize.Level0) +HWTEST_F(VideoProcessingServerTest, Create_Fail_WhenInvalidFeatureAndClientName, TestSize.Level0) { - VPE_LOGI("Create_Fail_WhenNoMemory: begin!"); + VPE_LOGI("Create_Fail_WhenInvalidFeatureAndClientName: begin!"); VideoProcessingServer server(1, true); - std::string feature = "testFeature"; + std::string feature = "invalidFeature"; std::string clientName = "testClient"; - int32_t clientID = 0; - EXPECT_EQ(server.Create(feature, clientName, clientID), VPE_ALGO_ERR_NO_MEMORY); - VPE_LOGI("Create_Fail_WhenNoMemory: end!"); + int32_t clientID = 2; + EXPECT_NE(server.Create(feature, clientName, clientID), VPE_ALGO_ERR_OK); + VPE_LOGI("Create_Fail_WhenInvalidFeatureAndClientName: end!"); } -/** - * @tc.name : Create_Fail_WhenInitializeFails - * @tc.number: VideoProcessingServerTest_003 - * @tc.desc : Test Create method when initialization fails. - */ -HWTEST_F(VideoProcessingServerTest, Create_Fail_WhenInitializeFails, TestSize.Level0) -{ - VPE_LOGI("Create_Fail_WhenInitializeFails: begin!"); - VideoProcessingServer server(1, true); - std::string feature = "testFeature"; - std::string clientName = "testClient"; - int32_t clientID = 0; - EXPECT_NE(server.Create(feature, clientName, clientID), ERR_INVALID_STATE); - VPE_LOGI("Create_Fail_WhenInitializeFails: end!"); -} - -/** - * @tc.name : Create_Fail_WhenAddClientFails - * @tc.number: VideoProcessingServerTest_004 - * @tc.desc : Test Create method when adding client fails.
- */ -HWTEST_F(VideoProcessingServerTest, Create_Fail_WhenAddClientFails, TestSize.Level0) -{ - VPE_LOGI("Create_Fail_WhenAddClientFails: begin!"); - VideoProcessingServer server(1, true); - std::string feature = "testFeature"; - std::string clientName = "testClient"; - int32_t clientID = 0; - EXPECT_NE(server.Create(feature, clientName, clientID), ERR_INVALID_DATA); - VPE_LOGI("Create_Fail_WhenAddClientFails: end!"); -} HWTEST_F(VideoProcessingServerTest, Destroy_ShouldReturnErrOk_WhenDestroyLockedReturnsErrOk, TestSize.Level0) { @@ -214,10 +199,14 @@ HWTEST_F(VideoProcessingServerTest, Destroy_ShouldReturnErrOk_WhenDestroyLockedR // Arrange VideoProcessingServer videoProcessingServer(1, true); // Act - auto result = videoProcessingServer.Destroy(1); + std::string feature = "AisrImage"; + std::string clientName = "testClient"; + int32_t clientID = 1; + videoProcessingServer.Create(feature, clientName, clientID); + auto result = videoProcessingServer.Destroy(clientID); // Assert - EXPECT_NE(result, ERR_OK); + EXPECT_EQ(result, ERR_OK); VPE_LOGI("Destroy_ShouldReturnErrOk_WhenDestroyLockedReturnsErrOk: end!"); } @@ -265,6 +254,7 @@ HWTEST_F(VideoProcessingServerTest, Process_ShouldReturnErrOk_WhenInputOutputNot VideoProcessingServer videoProcessingServer(1, true); SurfaceBufferInfo input; SurfaceBufferInfo output; + input.surfacebuffer = SurfaceBuffer::Create(); output.surfacebuffer = SurfaceBuffer::Create(); EXPECT_NE(videoProcessingServer.Process(1, input, output), ERR_OK); VPE_LOGI("Process_ShouldReturnErrOk_WhenInputOutputNotNull: end!"); @@ -305,15 +295,21 @@ HWTEST_F(VideoProcessingServerTest, ComposeImage_ShouldReturnErrOk_WhenInputIsVa { VPE_LOGI("ComposeImage_ShouldReturnErrOk_WhenInputIsValid: begin!"); // Arrange + VideoProcessingServer videoProcessingServer(1, true); + // Act + std::string feature = "AisrImage"; + std::string clientName = "testClient"; int32_t clientID = 1; + videoProcessingServer.Create(feature, clientName, clientID); + SurfaceBufferInfo inputSdrImage; SurfaceBufferInfo inputGainmap; SurfaceBufferInfo outputHdrImage; + inputSdrImage.surfacebuffer = SurfaceBuffer::Create(); + inputGainmap.surfacebuffer = SurfaceBuffer::Create(); outputHdrImage.surfacebuffer = SurfaceBuffer::Create(); bool legacy = false; - VideoProcessingServer videoProcessingServer(clientID, legacy); - // Act ErrCode result = videoProcessingServer.ComposeImage(clientID, inputSdrImage, inputGainmap, outputHdrImage, legacy); @@ -361,6 +357,9 @@ HWTEST_F(VideoProcessingServerTest, DecomposeImage_ShouldReturnErrOk_WhenInputIs SurfaceBufferInfo inputImage; SurfaceBufferInfo outputSdrImage; SurfaceBufferInfo outputGainmap; + inputImage.surfacebuffer = SurfaceBuffer::Create(); + outputSdrImage.surfacebuffer = SurfaceBuffer::Create(); + outputGainmap.surfacebuffer = SurfaceBuffer::Create(); VideoProcessingServer videoProcessingServer(clientID, true); // Act @@ -567,7 +566,7 @@ HWTEST_F(VideoProcessingServerTest, DelayUnloadTaskLocked_WhenUnloadHandlerNotNu VPE_LOGI("DelayUnloadTaskLocked_WhenUnloadHandlerIsNull: end!"); } -HWTEST_F(VideoProcessingServerTest, ClearAlgorithms_ShouldClearAlgorithms_WhenCalled, TestSize.Level0) +HWTEST_F(VideoProcessingServerTest, ClearAlgorithms_ShouldClearAlgorithms_WhenCalled_01, TestSize.Level0) { VPE_LOGI("ClearAlgorithms_ShouldClearAlgorithms_WhenCalled: begin!"); VideoProcessingServer server(1, true); @@ -583,6 +582,24 @@ HWTEST_F(VideoProcessingServerTest, ClearAlgorithms_ShouldClearAlgorithms_WhenCa VPE_LOGI("ClearAlgorithms_ShouldClearAlgorithms_WhenCalled: 
end!"); } +HWTEST_F(VideoProcessingServerTest, ClearAlgorithms_ShouldClearAlgorithms_WhenCalled_02, TestSize.Level0) +{ + VPE_LOGI("ClearAlgorithms_ShouldClearAlgorithms_WhenCalled: begin!"); + VideoProcessingServer server(1, true); + // Act + std::string feature = "AisrImage"; + std::string clientName = "testClient"; + int32_t clientID = 1; + server.Create(feature, clientName, clientID); + server.isWorking_ = true; + server.ClearAlgorithms(); + + EXPECT_TRUE(server.algorithms_.empty()); + EXPECT_TRUE(server.clients_.empty()); + EXPECT_FALSE(server.isWorking_.load()); + VPE_LOGI("ClearAlgorithms_ShouldClearAlgorithms_WhenCalled: end!"); +} + TEST_F(VideoProcessingServerTest, SetParameter_ShouldReturnError_WhenInvalidClientID) { VideoProcessingServer videoProcessingServer(1, true); @@ -594,6 +611,7 @@ TEST_F(VideoProcessingServerTest, SetParameter_ShouldReturnError_WhenInvalidClie EXPECT_NE(result, VPE_ALGO_ERR_OK); } + TEST_F(VideoProcessingServerTest, SetParameter_ShouldReturnError_WhenEmptyParameter) { VideoProcessingServer videoProcessingServer(1, true); diff --git a/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp b/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp index d93183d8b3f20b92bee608f5809b15948357b050..dc0d0d2bbb2ca4ad94a70bc45ab12b4f53565de8 100644 --- a/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp +++ b/test/unittest/video_variable_refreshrate_test/video_variable_refreshrate_unit_test.cpp @@ -117,7 +117,7 @@ sptr CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int return buffer; } -HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_init_01, TestSize.Level1) +HWTEST_F(VideoVariableRefreshRateUnitTest, VideoVariableRefreshRate_init_01, TestSize.Level0) { auto vrrPredictor = OHOS::Media::VideoProcessingEngine::VideoRefreshRatePrediction::Create(); VPEAlgoErrCode ret = vrrPredictor->CheckVRRSupport(UT_PROCESS_NAME); diff --git a/test/unittest/vpe_framework/vpe_framework_unit_test.cpp b/test/unittest/vpe_framework/vpe_framework_unit_test.cpp index f4da449c4e0bbdd11ea2a5433122c63290e4b1b5..19899d89dd2ae1919653eda7a92b2c52adb016f3 100644 --- a/test/unittest/vpe_framework/vpe_framework_unit_test.cpp +++ b/test/unittest/vpe_framework/vpe_framework_unit_test.cpp @@ -46,7 +46,7 @@ public: * @tc.name : PluginManager_Init * @tc.desc : Test for PluginManager Init */ -HWTEST_F(VPEFrameworkUnitTest, PluginManager_Init_010101, TestSize.Level1) +HWTEST_F(VPEFrameworkUnitTest, PluginManager_Init_010101, TestSize.Level0) { std::unique_ptr pluginManager = std::make_unique(); ASSERT_EQ(VPE_ERR_OK, pluginManager->Init()); diff --git a/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp b/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp index 2d53fcb305cecbffdd9c09c57e0e311b643d1d8a..d2d14fc92c38dc2a7c281f8dfc581030bf5e405a 100644 --- a/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp +++ b/test/utils/DetailEnhancer/sample/detailEnh_sample.cpp @@ -28,21 +28,53 @@ namespace OHOS { namespace Media { namespace VideoProcessingEngine { -void ReadYuvFile(sptr &buffer, std::unique_ptr &yuvFile, int32_t frameSize) +const float SIZE_COEF_YUV420 = 1.5; +const float SIZE_COEF_RGBA8888 = 4; +const float SIZE_COEF_YUV444 = 3; + +float GetCoef(int32_t format) +{ + float coef = 1.0; + switch (format) { + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: + case OHOS::GRAPHIC_PIXEL_FMT_YCBCR_420_P: + coef = SIZE_COEF_YUV420; + break; + case 
OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888: + case OHOS::GRAPHIC_PIXEL_FMT_BGRA_8888: + case OHOS::GRAPHIC_PIXEL_FMT_RGBA_1010102: + coef = SIZE_COEF_RGBA8888; + break; + default: + TEST_LOG("Unknown format:%d", format); + coef = SIZE_COEF_YUV444; + break; + } + return coef; +} + +void ReadYuvFile(sptr<SurfaceBuffer> &buffer, std::unique_ptr<std::ifstream> &yuvFile, int32_t inputWidth, + int32_t inputHeight, int32_t inputFormat, int stride) { if (buffer == nullptr) { TEST_LOG("null ptr"); return; } - if (frameSize < 0) { - TEST_LOG("Invalid size"); - return; - } if (!yuvFile->is_open()) { TEST_LOG("Yuv file is not open"); return; } - yuvFile->read(reinterpret_cast<char*>(buffer->GetVirAddr()), frameSize); + if (inputFormat == OHOS::GRAPHIC_PIXEL_FMT_RGBA_8888) { + for (int i = 0; i < inputHeight; i++) { + yuvFile->read(reinterpret_cast<char*>(buffer->GetVirAddr()) + + i * stride, inputWidth * 4); // 4 bytes per RGBA8888 pixel + } + } else { + yuvFile->read(reinterpret_cast<char*>(buffer->GetVirAddr()), + static_cast<int32_t>(inputWidth * inputHeight * GetCoef(inputFormat))); + } + + TEST_LOG("stride:%d", stride); } sptr<SurfaceBuffer> CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height) diff --git a/test/utils/DetailEnhancer/sample/detailEnh_sample.h b/test/utils/DetailEnhancer/sample/detailEnh_sample.h index dad27d17f57ee9714314e17195193d4353a439af..fc1666bdb1e76be457897b49399fee448f0baffa 100644 --- a/test/utils/DetailEnhancer/sample/detailEnh_sample.h +++ b/test/utils/DetailEnhancer/sample/detailEnh_sample.h @@ -23,7 +23,8 @@ namespace OHOS { namespace Media { namespace VideoProcessingEngine { -void ReadYuvFile(sptr<SurfaceBuffer> &buffer, std::unique_ptr<std::ifstream> &yuvFile, int32_t frameSize); +void ReadYuvFile(sptr<SurfaceBuffer> &buffer, std::unique_ptr<std::ifstream> &yuvFile, int32_t inputWidth, + int32_t inputHeight, int32_t inputFormat, int stride); sptr<SurfaceBuffer> CreateSurfaceBuffer(uint32_t pixelFormat, int32_t width, int32_t height); } // namespace VideoProcessingEngine } // namespace Media
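The reworked ReadYuvFile above sizes a full-frame read from a per-format byte coefficient, except for RGBA8888, where it copies row by row so that the file's tightly packed rows land at the destination buffer's stride offsets. A compilable sketch of that logic under simplified assumptions follows: std::ifstream plus a raw byte vector stand in for the OHOS SurfaceBuffer, and the function names are illustrative, not from the repository.

// Hypothetical sketch of the stride-aware frame read performed by the new ReadYuvFile.
#include <cstdint>
#include <fstream>
#include <vector>

constexpr float SIZE_COEF_YUV420 = 1.5f;   // luma plane plus chroma at quarter resolution
constexpr float SIZE_COEF_RGBA8888 = 4.0f; // 4 bytes per pixel
constexpr float SIZE_COEF_YUV444 = 3.0f;   // fallback, mirrors the patch's default branch

// Reads one RGBA8888 frame row by row so the file's packed rows land at offsets
// that are `stride` bytes apart in the destination.
bool ReadRgbaFrame(std::ifstream& file, std::vector<uint8_t>& dst,
                   int32_t width, int32_t height, int32_t stride)
{
    if (stride < width * 4 || dst.size() < static_cast<size_t>(stride) * height) {
        return false;
    }
    for (int32_t row = 0; row < height; ++row) {
        file.read(reinterpret_cast<char*>(dst.data()) + static_cast<size_t>(row) * stride,
                  width * 4);
    }
    return file.good();
}

// Packed formats (e.g. 4:2:0) can still be read in one shot using the size coefficient.
bool ReadPackedFrame(std::ifstream& file, std::vector<uint8_t>& dst,
                     int32_t width, int32_t height, float coef)
{
    auto bytes = static_cast<std::streamsize>(width * height * coef);
    if (dst.size() < static_cast<size_t>(bytes)) {
        return false;
    }
    file.read(reinterpret_cast<char*>(dst.data()), bytes);
    return file.good();
}

The row-wise path matters because a graphics buffer's stride is frequently padded beyond width * 4, so a single bulk read would shear every row after the first.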