diff --git a/common/include/utils/dcamera_utils_tools.h b/common/include/utils/dcamera_utils_tools.h index 986ac658b01202015ac473d556804acaccaaccec..095f7257a1c1959a6f682210a3d3d9fb9458c716 100644 --- a/common/include/utils/dcamera_utils_tools.h +++ b/common/include/utils/dcamera_utils_tools.h @@ -18,6 +18,7 @@ #include #include +#include "data_buffer.h" namespace OHOS { namespace DistributedHardware { @@ -30,6 +31,7 @@ int32_t GetAlignedHeight(int32_t width); std::string Base64Encode(const unsigned char *toEncode, unsigned int len); std::string Base64Decode(const std::string& basicString); bool IsBase64(unsigned char c); +void SaveFile(std::string fileName, const std::shared_ptr& buffer); } // namespace DistributedHardware } // namespace OHOS #endif // OHOS_DCAMERA_UTILS_TOOL_H diff --git a/common/src/utils/dcamera_utils_tools.cpp b/common/src/utils/dcamera_utils_tools.cpp index f66a9838837f0d99e73597c6f0fbc11e8d5bed96..3b2e5a95e677348f42535678b23afac435b00d1b 100644 --- a/common/src/utils/dcamera_utils_tools.cpp +++ b/common/src/utils/dcamera_utils_tools.cpp @@ -17,6 +17,7 @@ #include #include +#include #include "distributed_camera_constants.h" #include "distributed_camera_errno.h" @@ -172,5 +173,21 @@ bool IsBase64(unsigned char c) { return (isalnum(c) || (c == '+') || (c == '/')); } + +void SaveFile(std::string fileName, const std::shared_ptr& buffer) +{ + if (fileName.empty() || buffer == nullptr) { + DHLOGE("invalid params."); + return; + } + std::ofstream ofs; + ofs.open(fileName, std::ios::binary | std::ios::out | std::ios::app); + if (!ofs.is_open()) { + DHLOGE("open file failed."); + return; + } + ofs.write((const char*)buffer->Data(), buffer->Size()); + ofs.close(); +} } // namespace DistributedHardware } // namespace OHOS diff --git a/services/data_process/BUILD.gn b/services/data_process/BUILD.gn index d65da7b1242d4d305e7dc4441b712081e4ccfdf6..8545fed00e8541afb96fe14665414c0656bb3524 100644 --- a/services/data_process/BUILD.gn +++ 
b/services/data_process/BUILD.gn @@ -18,7 +18,6 @@ import( ohos_shared_library("distributed_camera_data_process") { include_dirs = [ - "//third_party/ffmpeg/", "//commonlibrary/c_utils/base/include", "//utils/system/safwk/native/include", "//foundation/graphic/graphic_2d/interfaces/innerkits/common", @@ -35,7 +34,6 @@ ohos_shared_library("distributed_camera_data_process") { "include/utils", "include/pipeline_node/multimedia_codec/decoder", "include/pipeline_node/multimedia_codec/encoder", - "include/pipeline_node/colorspace_conversion", "include/pipeline_node/fpscontroller", "include/pipeline_node/scale_conversion", "${common_path}/include/constants", @@ -47,30 +45,45 @@ ohos_shared_library("distributed_camera_data_process") { "src/pipeline/abstract_data_process.cpp", "src/pipeline/dcamera_pipeline_sink.cpp", "src/pipeline/dcamera_pipeline_source.cpp", - "src/pipeline_node/colorspace_conversion/color_format_process.cpp", "src/pipeline_node/fpscontroller/fps_controller_process.cpp", "src/pipeline_node/multimedia_codec/decoder/decode_surface_listener.cpp", "src/pipeline_node/multimedia_codec/decoder/decode_video_callback.cpp", "src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp", "src/pipeline_node/multimedia_codec/encoder/encode_video_callback.cpp", - "src/pipeline_node/scale_conversion/scale_convert_process.cpp", "src/utils/image_common_type.cpp", "src/utils/property_carrier.cpp", ] + deps = [ + "${common_path}:distributed_camera_utils", + "${graphicstandard_path}:libsurface", + ] + if (!distributed_camera_common) { - sources += - [ "src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp" ] + include_dirs += [ "//third_party/libyuv/files/include" ] + sources += [ + "src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp", + "src/pipeline_node/scale_conversion/scale_convert_process.cpp", + ] + deps += [ "//third_party/libyuv:yuv" ] } else { - sources += [ 
"src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp" ] + include_dirs += [ "//third_party/ffmpeg/" ] + sources += [ + "src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp", + "src/pipeline_node/scale_conversion/scale_convert_process_common.cpp", + ] + deps += [ "//third_party/ffmpeg:libohosffmpeg" ] } - deps = [ - "${common_path}:distributed_camera_utils", - "${graphicstandard_path}:libsurface", - "//third_party/ffmpeg:libohosffmpeg", + cflags = [ + "-fPIC", + "-Wall", ] + if (distributed_camera_common) { + cflags += [ "-DDCAMERA_COMMON" ] + } + defines = [ "HI_LOG_ENABLE", "DH_LOG_TAG=\"dcameradataproc\"", diff --git a/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h b/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h deleted file mode 100644 index df0bbc697f6647a5c706025c19ed6a2585c6bcad..0000000000000000000000000000000000000000 --- a/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2022 Huawei Device Co., Ltd. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef OHOS_COLOR_FORMAT_PROCESS_H -#define OHOS_COLOR_FORMAT_PROCESS_H - -#include "securec.h" - -#include "abstract_data_process.h" -#include "data_buffer.h" -#include "dcamera_pipeline_source.h" -#include "image_common_type.h" - -namespace OHOS { -namespace DistributedHardware { -class ColorFormatProcess : public AbstractDataProcess { -public: - explicit ColorFormatProcess(const std::weak_ptr& callbackPipSource) - : callbackPipelineSource_(callbackPipSource) {} - ~ColorFormatProcess() override; - - int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, - VideoConfigParams& processedConfig) override; - int32_t ProcessData(std::vector>& inputBuffers) override; - void ReleaseProcessNode() override; - - int32_t GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier) override; - -private: - bool IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig); - int32_t GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf); - bool CheckColorProcessInputInfo(const ImageUnitInfo& srcImgInfo); - bool CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - bool IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo); - void SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane, - int32_t srcHalfWidth); - int32_t SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - void CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint8_t *srcVPlane, uint8_t *dstUVPlane, - int32_t dstHalfWidth); - int32_t CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - int32_t CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - int32_t ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - int32_t ColorConvertNV12ToI420(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& 
dstImgInfo); - int32_t ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); - int32_t ColorFormatDone(std::vector>& outputBuffers); - -private: - constexpr static int32_t YUV_BYTES_PER_PIXEL = 3; - constexpr static int32_t Y2UV_RATIO = 2; - - std::weak_ptr callbackPipelineSource_; - VideoConfigParams sourceConfig_; - VideoConfigParams targetConfig_; - VideoConfigParams processedConfig_; - std::atomic isColorFormatProcess_ = false; -}; -} // namespace DistributedHardware -} // namespace OHOS -#endif // OHOS_COLOR_FORMAT_PROCESS_H diff --git a/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h b/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h index 7171fb704f190269e6a14bf295286b93daca22f3..007d02edf967fdbdfb23a0ed5bdee6fee4a4372f 100644 --- a/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h +++ b/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h @@ -18,6 +18,7 @@ #include "abstract_data_process.h" +#ifdef DCAMERA_COMMON #ifdef __cplusplus extern "C" { #endif @@ -28,6 +29,7 @@ extern "C" { #ifdef __cplusplus }; #endif +#endif #include #include @@ -56,12 +58,18 @@ private: bool CheckScaleProcessInputInfo(const ImageUnitInfo& srcImgInfo); bool CheckScaleConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); int32_t GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf); - int32_t ScaleConvert(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); + int32_t ScaleConvert(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo); +#ifdef DCAMERA_COMMON int32_t CopyYUV420SrcData(const ImageUnitInfo& srcImgInfo); int32_t CopyNV12SrcData(const ImageUnitInfo& srcImgInfo); int32_t CopyNV21SrcData(const ImageUnitInfo& srcImgInfo); - int32_t ConvertDone(std::vector>& outputBuffers); AVPixelFormat GetAVPixelFormat(Videoformat colorFormat); +#else + 
int32_t ConvertNV12ToI420(ImageUnitInfo& srcImgInfo); + int32_t ConvertI420Scale(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo); + int32_t ConvertI420ToNV21(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo); +#endif + int32_t ConvertDone(std::vector>& outputBuffers); private: constexpr static int32_t DATA_LEN = 4; @@ -72,18 +80,20 @@ private: constexpr static int32_t YUV_BYTES_PER_PIXEL = 3; constexpr static int32_t Y2UV_RATIO = 2; +#ifdef DCAMERA_COMMON uint8_t *srcData_[DATA_LEN] = { nullptr }; uint8_t *dstData_[DATA_LEN] = { nullptr }; int32_t srcLineSize_[DATA_LEN] = { 0 }; int32_t dstLineSize_[DATA_LEN] = { 0 }; int32_t dstBuffSize_ = 0; SwsContext *swsContext_ = nullptr; + std::mutex scaleMutex_; +#endif VideoConfigParams sourceConfig_; VideoConfigParams targetConfig_; VideoConfigParams processedConfig_; std::weak_ptr callbackPipelineSource_; std::atomic isScaleConvert_ = false; - std::mutex scaleMutex_; }; } // namespace DistributedHardware } // namespace OHOS diff --git a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp index 0dcb6a25d3dad8d73bcf68d1fecb347be3db08af..89047ffcb20e5121df266dcf36c98e3657c19e61 100644 --- a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp +++ b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp @@ -18,7 +18,6 @@ #include "dcamera_hitrace_adapter.h" #include "distributed_hardware_log.h" -#include "color_format_process.h" #include "decode_data_process.h" #include "fps_controller_process.h" #include "scale_convert_process.h" @@ -104,7 +103,6 @@ int32_t DCameraPipelineSource::InitDCameraPipNodes(const VideoConfigParams& sour pipNodeRanks_.push_back(std::make_shared(eventBusSource_, shared_from_this())); pipNodeRanks_.push_back(std::make_shared(shared_from_this())); - pipNodeRanks_.push_back(std::make_shared(shared_from_this())); if (pipNodeRanks_.size() == 0) { DHLOGD("Creating an empty source pipeline."); 
pipelineHead_ = nullptr; diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp index 566769dbaddec6e4c6ac366121f34274bb34f4bd..b1be3192eeb94e939027f035b55a9c17f3c8456f 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp @@ -176,29 +176,7 @@ int32_t DecodeDataProcess::InitDecoderMetadataFormat() return DCAMERA_NOT_FOUND; } - DHLOGI("Init video decoder metadata format. videoformat: %d", processedConfig_.GetVideoformat()); - switch (processedConfig_.GetVideoformat()) { - case Videoformat::YUVI420: - metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::YUVI420); - metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE); - break; - case Videoformat::NV12: - metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12); - metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE); - break; - case Videoformat::NV21: - metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV21); - metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE); - break; - case Videoformat::RGBA_8888: - metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::RGBA); - metadataFormat_.PutIntValue("max_input_size", MAX_RGB32_BUFFER_SIZE); - break; - default: - DHLOGE("The current pixel format does not support encoding."); - return DCAMERA_NOT_FOUND; - } - + metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12); metadataFormat_.PutStringValue("codec_mime", processType_); metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth()); metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight()); @@ -541,35 +519,19 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 return; } - size_t 
yuvImageSize = static_cast(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * - YUV_BYTES_PER_PIXEL / Y2UV_RATIO); + size_t yuvImageSize = static_cast(alignedWidth * alignedHeight * YUV_BYTES_PER_PIXEL / Y2UV_RATIO); std::shared_ptr bufferOutput = std::make_shared(yuvImageSize); uint8_t *addr = static_cast(surBuf->GetVirAddr()); - if (alignedWidth == sourceConfig_.GetWidth() && - alignedHeight == sourceConfig_.GetHeight()) { - errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, yuvImageSize); - if (err != EOK) { - DHLOGE("memcpy_s surface buffer failed."); - return; - } - } else { - ImageUnitInfo srcImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), - sourceConfig_.GetHeight(), alignedWidth, alignedHeight, static_cast(alignedWidth * alignedHeight), - surBuf->GetSize(), addr }; - ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(), - processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), - processedConfig_.GetWidth() * processedConfig_.GetHeight(), bufferOutput->Size(), bufferOutput->Data() }; - int32_t retRow = CopyYUVPlaneByRow(srcImgInfo, dstImgInfo); - if (retRow != DCAMERA_OK) { - DHLOGE("memcpy_s surface buffer failed."); - return; - } + errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, yuvImageSize); + if (err != EOK) { + DHLOGE("memcpy_s surface buffer failed."); + return; } bufferOutput->SetInt64(TIME_STAMP_US, timeStamp); bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); - bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth()); - bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight()); + bufferOutput->SetInt32("alignedWidth", alignedWidth); + bufferOutput->SetInt32("alignedHeight", alignedHeight); bufferOutput->SetInt32("width", processedConfig_.GetWidth()); bufferOutput->SetInt32("height", processedConfig_.GetHeight()); diff --git 
a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp index 49b3db2b39aae4bd8e288eb78a26bc5b2d8f44e4..1fd5ceefda4bd239afa920592e573b7f46d8d41b 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp @@ -14,7 +14,7 @@ */ #include "decode_data_process.h" - +#include "distributed_camera_constants.h" #include "distributed_hardware_log.h" #include "graphic_common_c.h" @@ -53,9 +53,11 @@ int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const sourceConfig_ = sourceConfig; targetConfig_ = targetConfig; + DHLOGI("cmh DecodeNode. The target video codec type %d, the source video codec type %d.", + targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType()); if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) { DHLOGD("Disable DecodeNode. 
The target video codec type %d is the same as the source video codec type %d.", - sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); + targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType()); processedConfig_ = sourceConfig; processedConfig = processedConfig_; isDecoderProcess_.store(true); @@ -162,14 +164,17 @@ int32_t DecodeDataProcess::InitDecoderMetadataFormat() processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC); switch (sourceConfig_.GetVideoCodecType()) { case VideoCodecType::CODEC_H264: + DHLOGI("cmh InitDecoderMetadataFormat CODEC_H264."); processType_ = "video/avc"; processedConfig_.SetVideoformat(Videoformat::NV12); break; case VideoCodecType::CODEC_H265: + DHLOGI("cmh InitDecoderMetadataFormat CODEC_H265."); processType_ = "video/hevc"; processedConfig_.SetVideoformat(Videoformat::NV12); break; case VideoCodecType::CODEC_MPEG4_ES: + DHLOGI("cmh InitDecoderMetadataFormat CODEC_MPEG4_ES."); processType_ = "video/mp4v-es"; break; default: @@ -522,6 +527,16 @@ void DecodeDataProcess::GetDecoderOutputBuffer(const sptr& sur DHLOGE("surface buffer size or alignedWidth too long"); return; } + uint8_t *addr = static_cast(surfaceBuffer->GetVirAddr()); + int32_t size = surfaceBuffer->GetSize(); + std::shared_ptr dataBuffer = std::make_shared(size); + errno_t err = memcpy_s(dataBuffer->Data(), dataBuffer->Capacity(), addr, size); + if (err != EOK) { + DHLOGE("memcpy_s surface buffer failed."); + return; + } + std::string fileName = "/data/log/dcamera_decode_output_buffer.yuv"; + SaveFile(fileName, dataBuffer); int32_t alignedHeight = alignedHeight_; DHLOGD("OutputBuffer alignedWidth %d, alignedHeight %d, TimeUs %lld.", alignedWidth, alignedHeight, timeStampUs); CopyDecodedImage(surfaceBuffer, timeStampUs, alignedWidth, alignedHeight); @@ -548,13 +563,15 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 return; } - bufferOutput->SetInt64("timeUs", timeStampUs); + bufferOutput->SetInt64(TIME_STAMP_US, 
timeStampUs); bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth()); bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight()); bufferOutput->SetInt32("width", processedConfig_.GetWidth()); bufferOutput->SetInt32("height", processedConfig_.GetHeight()); + std::string fileName = "/data/log/dcamera_decode_copy_plane.yuv"; + SaveFile(fileName, bufferOutput); PostOutputDataBuffers(bufferOutput); } diff --git a/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp index 55c2cf24f3bb979e2b18df8f90dc01f9e583df44..7f639efceb3470b7205fdd6f9f00ea9b479903fe 100644 --- a/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp +++ b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp @@ -13,11 +13,13 @@ * limitations under the License. 
*/ +#include "scale_convert_process.h" + +#include "libyuv.h" #include "dcamera_utils_tools.h" #include "distributed_camera_constants.h" #include "distributed_camera_errno.h" #include "distributed_hardware_log.h" -#include "scale_convert_process.h" namespace OHOS { namespace DistributedHardware { @@ -37,36 +39,14 @@ int32_t ScaleConvertProcess::InitNode(const VideoConfigParams& sourceConfig, con targetConfig_ = targetConfig; processedConfig_ = sourceConfig; processedConfig_.SetWidthAndHeight(targetConfig.GetWidth(), targetConfig.GetHeight()); + processedConfig_.SetVideoformat(targetConfig.GetVideoformat()); processedConfig = processedConfig_; if (!IsConvertible(sourceConfig, targetConfig)) { - DHLOGI("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.", + DHLOGI("sourceConfig: Videoformat %d Width %d, Height %d is the same as the targetConfig: " + "Videoformat %d Width %d, Height %d.", sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(), targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight()); - isScaleConvert_.store(true); - return DCAMERA_OK; - } - - int32_t ret = av_image_alloc(srcData_, srcLineSize_, sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), - GetAVPixelFormat(sourceConfig_.GetVideoformat()), SOURCE_ALIGN); - if (ret < DCAMERA_OK) { - DHLOGE("Could not allocate source image."); - return DCAMERA_BAD_VALUE; - } - - dstBuffSize_ = av_image_alloc(dstData_, dstLineSize_, processedConfig_.GetWidth(), processedConfig_.GetHeight(), - GetAVPixelFormat(processedConfig_.GetVideoformat()), TARGET_ALIGN); - if (dstBuffSize_ < DCAMERA_OK) { - DHLOGE("Could not allocate destination image."); - return DCAMERA_BAD_VALUE; - } - - swsContext_ = sws_getContext(sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), - GetAVPixelFormat(sourceConfig_.GetVideoformat()), processedConfig_.GetWidth(), processedConfig_.GetHeight(), - 
GetAVPixelFormat(processedConfig_.GetVideoformat()), SWS_FAST_BILINEAR, nullptr, nullptr, nullptr); - if (swsContext_ == nullptr) { - DHLOGE("Create SwsContext failed."); - return DCAMERA_BAD_VALUE; } isScaleConvert_.store(true); @@ -76,7 +56,8 @@ int32_t ScaleConvertProcess::InitNode(const VideoConfigParams& sourceConfig, con bool ScaleConvertProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) { return (sourceConfig_.GetWidth() != targetConfig.GetWidth()) || - (sourceConfig_.GetHeight() != targetConfig.GetHeight()); + (sourceConfig_.GetHeight() != targetConfig.GetHeight()) || + (sourceConfig_.GetVideoformat() != targetConfig.GetVideoformat()); } void ScaleConvertProcess::ReleaseProcessNode() @@ -84,16 +65,6 @@ void ScaleConvertProcess::ReleaseProcessNode() DHLOGI("Start release [%d] node : ScaleConvertNode.", nodeRank_); isScaleConvert_.store(false); - { - std::lock_guard autoLock(scaleMutex_); - if (swsContext_ != nullptr) { - av_freep(&srcData_[0]); - av_freep(&dstData_[0]); - sws_freeContext(swsContext_); - swsContext_ = nullptr; - } - } - if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); nextDataProcess_ = nullptr; @@ -115,9 +86,9 @@ int ScaleConvertProcess::ProcessData(std::vector>& i } if (!IsConvertible(sourceConfig_, processedConfig_)) { - DHLOGD("The target resolution: %dx%d is the same as the source resolution: %dx%d", - processedConfig_.GetWidth(), processedConfig_.GetHeight(), - sourceConfig_.GetWidth(), sourceConfig_.GetHeight()); + DHLOGD("The target resolution: %dx%d format: %d is the same as the source resolution: %dx%d format: %d", + processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetVideoformat(), + sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetVideoformat()); return ConvertDone(inputBuffers); } @@ -133,7 +104,9 @@ int ScaleConvertProcess::ProcessData(std::vector>& i return DCAMERA_BAD_VALUE; } - std::shared_ptr dstBuf = 
std::make_shared(dstBuffSize_); + size_t dstBuffSize = static_cast( + processedConfig_.GetWidth() * processedConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO); + std::shared_ptr dstBuf = std::make_shared(dstBuffSize); ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetWidth() * processedConfig_.GetHeight(), dstBuf->Size(), dstBuf->Data() }; @@ -193,16 +166,16 @@ int32_t ScaleConvertProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std: return DCAMERA_BAD_VALUE; } DHLOGD("ScaleConvertProcess imgBuf info : Videoformat %d, alignedWidth %d, alignedHeight %d, width %d, height %d" + - ", chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.width, imgInfo.height, imgInfo.alignedWidth, - imgInfo.alignedHeight, imgInfo.chromaOffset, imgInfo.imgSize); + ", chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.alignedWidth, imgInfo.alignedHeight, + imgInfo.width, imgInfo.height, imgInfo.chromaOffset, imgInfo.imgSize); return DCAMERA_OK; } bool ScaleConvertProcess::CheckScaleProcessInputInfo(const ImageUnitInfo& srcImgInfo) { return srcImgInfo.colorFormat == sourceConfig_.GetVideoformat() && - srcImgInfo.alignedWidth == sourceConfig_.GetWidth() && - srcImgInfo.alignedHeight == sourceConfig_.GetHeight() && + srcImgInfo.width == sourceConfig_.GetWidth() && + srcImgInfo.height == sourceConfig_.GetHeight() && IsCorrectImageUnitInfo(srcImgInfo); } @@ -227,9 +200,12 @@ bool ScaleConvertProcess::CheckScaleConvertInfo(const ImageUnitInfo& srcImgInfo, return false; } - if ((dstImgInfo.width == srcImgInfo.alignedWidth) && (dstImgInfo.height == srcImgInfo.alignedHeight)) { - DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.", - dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight); + if ((dstImgInfo.width == 
srcImgInfo.alignedWidth) && (dstImgInfo.height == srcImgInfo.alignedHeight) && + (dstImgInfo.colorFormat == srcImgInfo.colorFormat)) { + DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, dstColorFormat %d, " + "srcAlignedWidth %d, srcAlignedHeight %d, srcColorFormat %d.", + dstImgInfo.width, dstImgInfo.height, dstImgInfo.colorFormat, + srcImgInfo.alignedWidth, srcImgInfo.alignedHeight, srcImgInfo.colorFormat); return false; } @@ -245,118 +221,161 @@ bool ScaleConvertProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo) imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset); } -int32_t ScaleConvertProcess::ScaleConvert(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ScaleConvertProcess::ScaleConvert(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo) { - DHLOGD("ScaleConvertProcess : Scale convert start."); + DHLOGD("Scale convert start."); if (!CheckScaleConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ScaleConvertProcess : CheckScaleConvertInfo failed."); + DHLOGE("CheckScaleConvertInfo failed."); return DCAMERA_BAD_VALUE; } - std::lock_guard autoLock(scaleMutex_); - switch (GetAVPixelFormat(srcImgInfo.colorFormat)) { - case AV_PIX_FMT_YUV420P: { - int32_t ret = CopyYUV420SrcData(srcImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ScaleConvertProcess::ScaleConvert copy yuv420p src data failed."); - return ret; - } - break; - } - case AV_PIX_FMT_NV12: { - int32_t ret = CopyNV12SrcData(srcImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ScaleConvertProcess::ScaleConvert copy nv12 src data failed."); - return ret; - } - break; - } - case AV_PIX_FMT_NV21: { - int32_t ret = CopyNV21SrcData(srcImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ScaleConvertProcess::ScaleConvert copy nv21 src data failed."); - return ret; - } - break; - } - default: - DHLOGE("Unknown pixel format not support."); - return DCAMERA_BAD_VALUE; + int32_t ret = ConvertNV12ToI420(srcImgInfo); + if (ret != DCAMERA_OK) { + 
DHLOGE("Convert NV12 to I420 failed."); + return ret; } - sws_scale(swsContext_, static_cast(srcData_), srcLineSize_, 0, srcImgInfo.alignedHeight, - dstData_, dstLineSize_); - int32_t ret = memcpy_s(dstImgInfo.imgData, dstImgInfo.imgSize, dstData_[0], dstBuffSize_); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::ScaleConvert copy dst image info failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + ret = ConvertI420Scale(srcImgInfo, dstImgInfo); + if (ret != DCAMERA_OK) { + DHLOGE("Convert I420 scale failed."); + return ret; } + + ret = ConvertI420ToNV21(srcImgInfo, dstImgInfo); + if (ret != DCAMERA_OK) { + DHLOGE("Convert I420 to NV21 failed."); + return ret; + } + + DHLOGI("Scale convert end."); return DCAMERA_OK; } -int32_t ScaleConvertProcess::CopyYUV420SrcData(const ImageUnitInfo& srcImgInfo) +int32_t ScaleConvertProcess::ConvertNV12ToI420(ImageUnitInfo& srcImgInfo) { - int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + DHLOGD("Convert NV12 to I420, format=%d, width=[%d, %d], height=[%d, %d]", srcImgInfo.colorFormat, + srcImgInfo.width, srcImgInfo.alignedWidth, srcImgInfo.height, srcImgInfo.alignedHeight); + int srcSizeY = srcImgInfo.alignedWidth * srcImgInfo.alignedHeight; + uint8_t *srcDataY = srcImgInfo.imgData; + uint8_t *srcDataUV = srcImgInfo.imgData + srcSizeY; + + int dstSizeY = srcImgInfo.width * srcImgInfo.height; + int dstSizeUV = (srcImgInfo.width >> 1) * (srcImgInfo.height >> 1); + auto dstBuf = std::make_shared(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO); + uint8_t *dstDataY = dstBuf->Data(); + uint8_t *dstDataU = dstBuf->Data() + dstSizeY; + uint8_t *dstDataV = dstBuf->Data() + dstSizeY + dstSizeUV; + + int32_t ret = libyuv::NV12ToI420( + srcDataY, srcImgInfo.width, + srcDataUV, 
srcImgInfo.width, + dstDataY, srcImgInfo.width, + dstDataU, srcImgInfo.width >> 1, + dstDataV, srcImgInfo.width >> 1, + srcImgInfo.width, srcImgInfo.height); + if (ret != DCAMERA_OK) { + DHLOGE("Convert NV12 to I420 failed."); + return DCAMERA_BAD_VALUE; } - ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV, - srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + srcImgInfo.colorFormat = Videoformat::YUVI420; + srcImgInfo.alignedWidth = srcImgInfo.width; + srcImgInfo.alignedHeight = srcImgInfo.height; + srcImgInfo.chromaOffset = srcImgInfo.alignedWidth * srcImgInfo.alignedHeight; + srcImgInfo.imgSize = dstBuf->Size(); + errno_t err = memcpy_s(srcImgInfo.imgData, srcImgInfo.imgSize, dstBuf->Data(), dstBuf->Size()); + if (err != EOK) { + DHLOGE("memcpy_s srcImgInfo imgData failed."); + return DCAMERA_BAD_VALUE; } - ret = memcpy_s(srcData_[2], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV, - srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight + - srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV, - srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; - } + DHLOGD("Convert NV12 to I420 success."); return DCAMERA_OK; } -int32_t ScaleConvertProcess::CopyNV12SrcData(const ImageUnitInfo& srcImgInfo) +int32_t ScaleConvertProcess::ConvertI420Scale(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo) { - int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight); - if (ret != 
EOK) { - DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + if ((srcImgInfo.width == dstImgInfo.width) && (srcImgInfo.height == dstImgInfo.height)) { + return DCAMERA_OK; + } + + DHLOGD("Convert I420 Scale: format=%d, width=[%d, %d], height=[%d, %d]", srcImgInfo.colorFormat, + srcImgInfo.width, srcImgInfo.alignedWidth, srcImgInfo.height, srcImgInfo.alignedHeight); + int srcSizeY = srcImgInfo.width * srcImgInfo.height; + int srcSizeUV = (srcImgInfo.width >> 1) * (srcImgInfo.height >> 1); + uint8_t *srcDataY = srcImgInfo.imgData; + uint8_t *srcDataU = srcImgInfo.imgData + srcSizeY; + uint8_t *srcDataV = srcImgInfo.imgData + srcSizeY + srcSizeUV; + + int dstSizeY = dstImgInfo.width * dstImgInfo.height; + int dstSizeUV = (dstImgInfo.width >> 1) * (dstImgInfo.height >> 1); + auto dstBuf = std::make_shared(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO); + uint8_t *dstDataY = dstBuf->Data(); + uint8_t *dstDataU = dstBuf->Data() + dstSizeY; + uint8_t *dstDataV = dstBuf->Data() + dstSizeY + dstSizeUV; + + int32_t ret = libyuv::I420Scale( + srcDataY, srcImgInfo.width, + srcDataU, srcImgInfo.width >> 1, + srcDataV, srcImgInfo.width >> 1, + srcImgInfo.width, srcImgInfo.height, + dstDataY, dstImgInfo.width, + dstDataU, dstImgInfo.width >> 1, + dstDataV, dstImgInfo.width >> 1, + dstImgInfo.width, dstImgInfo.height, + libyuv::FilterMode::kFilterNone); + if (ret != DCAMERA_OK) { + DHLOGE("Convert I420 scale failed."); + return DCAMERA_BAD_VALUE; } - ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV, - srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + srcImgInfo.width = dstImgInfo.width; + srcImgInfo.height = dstImgInfo.height; + 
srcImgInfo.alignedWidth = dstImgInfo.alignedWidth; + srcImgInfo.alignedHeight = dstImgInfo.alignedHeight; + srcImgInfo.chromaOffset = srcImgInfo.alignedWidth * srcImgInfo.alignedHeight; + srcImgInfo.imgSize = dstBuf->Size(); + errno_t err = memcpy_s(srcImgInfo.imgData, srcImgInfo.imgSize, dstBuf->Data(), dstBuf->Size()); + if (err != EOK) { + DHLOGE("memcpy_s srcImgInfo imgData failed."); + return DCAMERA_BAD_VALUE; } + + DHLOGD("Convert I420 scale success."); return DCAMERA_OK; } -int32_t ScaleConvertProcess::CopyNV21SrcData(const ImageUnitInfo& srcImgInfo) +int32_t ScaleConvertProcess::ConvertI420ToNV21(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo) { - int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + if (srcImgInfo.colorFormat == dstImgInfo.colorFormat) { + return DCAMERA_OK; } - ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV, - srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, - srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV); - if (ret != EOK) { - DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret); - return DCAMERA_MEMORY_OPT_ERROR; + DHLOGI("Convert I420 to NV21: format=%d, width=[%d, %d], height=[%d, %d]", srcImgInfo.colorFormat, + srcImgInfo.width, srcImgInfo.alignedWidth, srcImgInfo.height, srcImgInfo.alignedHeight); + int srcSizeY = srcImgInfo.width * srcImgInfo.height; + int srcSizeUV = (srcImgInfo.width >> 1) * (srcImgInfo.height >> 1); + uint8_t *srcDataY = srcImgInfo.imgData; + uint8_t *srcDataU = srcImgInfo.imgData + srcSizeY; + uint8_t *srcDataV = srcImgInfo.imgData + srcSizeY + srcSizeUV; + + int dstSizeY = dstImgInfo.width * dstImgInfo.height; + uint8_t *dstDataY = 
dstImgInfo.imgData; + uint8_t *dstDataUV = dstImgInfo.imgData + dstSizeY; + + int32_t ret = libyuv::I420ToNV21( + srcDataY, srcImgInfo.width, + srcDataU, srcImgInfo.width >> 1, + srcDataV, srcImgInfo.width >> 1, + dstDataY, dstImgInfo.width, + dstDataUV, dstImgInfo.width, + dstImgInfo.width, dstImgInfo.height); + if (ret != DCAMERA_OK) { + DHLOGE("Convert I420 to NV21 failed."); + return DCAMERA_BAD_VALUE; } + + DHLOGD("Convert I420 to NV21 success."); return DCAMERA_OK; } @@ -387,23 +406,6 @@ int32_t ScaleConvertProcess::ConvertDone(std::vector return DCAMERA_OK; } -AVPixelFormat ScaleConvertProcess::GetAVPixelFormat(Videoformat colorFormat) -{ - AVPixelFormat format; - switch (colorFormat) { - case Videoformat::NV12: - format = AVPixelFormat::AV_PIX_FMT_NV12; - break; - case Videoformat::NV21: - format = AVPixelFormat::AV_PIX_FMT_NV21; - break; - default: - format = AVPixelFormat::AV_PIX_FMT_YUV420P; - break; - } - return format; -} - int32_t ScaleConvertProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier) { return DCAMERA_OK; diff --git a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process_common.cpp similarity index 35% rename from services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp rename to services/data_process/src/pipeline_node/scale_conversion/scale_convert_process_common.cpp index 4986f5fd94e1674baaf7fa0451740c3b58c766b3..08d19b808a8a092e4e66918159686e719afd2942 100644 --- a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp +++ b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process_common.cpp @@ -13,98 +13,144 @@ * limitations under the License. 
*/ -#include "color_format_process.h" +#include "dcamera_utils_tools.h" #include "distributed_camera_constants.h" #include "distributed_camera_errno.h" #include "distributed_hardware_log.h" +#include "scale_convert_process.h" namespace OHOS { namespace DistributedHardware { -ColorFormatProcess::~ColorFormatProcess() +ScaleConvertProcess::~ScaleConvertProcess() { - if (isColorFormatProcess_.load()) { - DHLOGD("~ColorFormatProcess : ReleaseProcessNode."); + if (isScaleConvert_.load()) { + DHLOGI("~ScaleConvertProcess : ReleaseProcessNode"); ReleaseProcessNode(); } } -int32_t ColorFormatProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, +int32_t ScaleConvertProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, VideoConfigParams& processedConfig) { - DHLOGD("ColorFormatProcess : InitNode."); + DHLOGI("ScaleConvertProcess : InitNode."); + sourceConfig_ = sourceConfig; + targetConfig_ = targetConfig; + processedConfig_ = sourceConfig; + processedConfig_.SetWidthAndHeight(targetConfig.GetWidth(), targetConfig.GetHeight()); + processedConfig_.SetVideoformat(targetConfig.GetVideoformat()); + processedConfig = processedConfig_; + + DHLOGI("cmh sourceConfig: Videoformat %d Width %d, Height %d is the same as the targetConfig: " + "Videoformat %d Width %d, Height %d.", + sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(), + targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight()); if (!IsConvertible(sourceConfig, targetConfig)) { - DHLOGE("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.", + DHLOGI("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.", sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(), targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight()); - return DCAMERA_BAD_TYPE; 
+ isScaleConvert_.store(true); + return DCAMERA_OK; } - sourceConfig_ = sourceConfig; - targetConfig_ = targetConfig; - processedConfig_ = sourceConfig; + int32_t ret = av_image_alloc(srcData_, srcLineSize_, sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), + GetAVPixelFormat(sourceConfig_.GetVideoformat()), SOURCE_ALIGN); + if (ret < DCAMERA_OK) { + DHLOGE("Could not allocate source image."); + return DCAMERA_BAD_VALUE; + } - if (sourceConfig_.GetVideoformat() != targetConfig_.GetVideoformat()) { - processedConfig_.SetVideoformat(targetConfig_.GetVideoformat()); + dstBuffSize_ = av_image_alloc(dstData_, dstLineSize_, processedConfig_.GetWidth(), processedConfig_.GetHeight(), + GetAVPixelFormat(processedConfig_.GetVideoformat()), TARGET_ALIGN); + if (dstBuffSize_ < DCAMERA_OK) { + DHLOGE("Could not allocate destination image."); + return DCAMERA_BAD_VALUE; } - processedConfig = processedConfig_; - isColorFormatProcess_.store(true); + swsContext_ = sws_getContext(sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), + GetAVPixelFormat(sourceConfig_.GetVideoformat()), processedConfig_.GetWidth(), processedConfig_.GetHeight(), + GetAVPixelFormat(processedConfig_.GetVideoformat()), SWS_FAST_BILINEAR, nullptr, nullptr, nullptr); + if (swsContext_ == nullptr) { + DHLOGE("Create SwsContext failed."); + return DCAMERA_BAD_VALUE; + } + + isScaleConvert_.store(true); return DCAMERA_OK; } -bool ColorFormatProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) +bool ScaleConvertProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) { - return ((sourceConfig.GetVideoformat() == targetConfig.GetVideoformat() || - (sourceConfig.GetVideoformat() == Videoformat::NV12 && targetConfig.GetVideoformat() == Videoformat::NV21)) && - sourceConfig.GetWidth() == targetConfig.GetWidth() && sourceConfig.GetHeight() == targetConfig.GetHeight()); + return (sourceConfig_.GetWidth() != 
targetConfig.GetWidth()) || + (sourceConfig_.GetHeight() != targetConfig.GetHeight()) || + (sourceConfig_.GetVideoformat() != targetConfig.GetVideoformat()); } -void ColorFormatProcess::ReleaseProcessNode() +void ScaleConvertProcess::ReleaseProcessNode() { - DHLOGD("Start release [%d] node : ColorFormatNode.", nodeRank_); - isColorFormatProcess_.store(false); + DHLOGI("Start release [%d] node : ScaleConvertNode.", nodeRank_); + isScaleConvert_.store(false); + + { + std::lock_guard autoLock(scaleMutex_); + if (swsContext_ != nullptr) { + av_freep(&srcData_[0]); + av_freep(&dstData_[0]); + sws_freeContext(swsContext_); + swsContext_ = nullptr; + } + } if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); nextDataProcess_ = nullptr; } - DHLOGD("Release [%d] node : ColorFormatNode end.", nodeRank_); + DHLOGI("Release [%d] node : ScaleConvertNode end.", nodeRank_); } -int32_t ColorFormatProcess::ProcessData(std::vector>& inputBuffers) +int ScaleConvertProcess::ProcessData(std::vector>& inputBuffers) { - DHLOGD("Process data in ColorFormatProcess."); + DHLOGD("Process data in ScaleConvertProcess."); + if (!isScaleConvert_.load()) { + DHLOGE("Scale Convert node occurred error or start release."); + return DCAMERA_DISABLE_PROCESS; + } + if (inputBuffers.empty() || inputBuffers[0] == nullptr) { DHLOGE("The input data buffers is empty."); return DCAMERA_BAD_VALUE; } - if (sourceConfig_.GetVideoformat() == processedConfig_.GetVideoformat()) { - DHLOGD("The target Video Format : %d is the same as the source Video Format : %d.", - sourceConfig_.GetVideoformat(), processedConfig_.GetVideoformat()); - return ColorFormatDone(inputBuffers); + DHLOGD("cmh The target resolution: %dx%d format: %d is the same as the source resolution: %dx%d format: %d", + processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetVideoformat(), + sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetVideoformat()); + if 
(!IsConvertible(sourceConfig_, processedConfig_)) { + DHLOGD("The target resolution: %dx%d format: %d is the same as the source resolution: %dx%d format: %d", + processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetVideoformat(), + sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetVideoformat()); + return ConvertDone(inputBuffers); } int64_t timeStamp = 0; if (!(inputBuffers[0]->FindInt64(TIME_STAMP_US, timeStamp))) { - DHLOGE("ColorConvertProcess : Find inputBuffer %s failed.", TIME_STAMP_US.c_str()); + DHLOGE("ScaleConvertProcess : Find inputBuffer %s failed.", TIME_STAMP_US.c_str()); return DCAMERA_BAD_VALUE; } ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr}; - if (GetImageUnitInfo(srcImgInfo, inputBuffers[0]) != DCAMERA_OK || !CheckColorProcessInputInfo(srcImgInfo)) { - DHLOGE("ColorConvertProcess : srcImgInfo error."); + if ((GetImageUnitInfo(srcImgInfo, inputBuffers[0]) != DCAMERA_OK) || !CheckScaleProcessInputInfo(srcImgInfo)) { + DHLOGE("ScaleConvertProcess : srcImgInfo error."); return DCAMERA_BAD_VALUE; } - size_t dstBufsize = static_cast(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * - YUV_BYTES_PER_PIXEL / Y2UV_RATIO); - std::shared_ptr dstBuf = std::make_shared(dstBufsize); + std::string fileName = "/data/log/dcamera_decode_scale_start.yuv"; + SaveFile(fileName, inputBuffers[0]); + + std::shared_ptr dstBuf = std::make_shared(dstBuffSize_); ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), processedConfig_.GetWidth() * processedConfig_.GetHeight(), dstBuf->Size(), dstBuf->Data() }; - if (ColorConvertByColorFormat(srcImgInfo, dstImgInfo) != DCAMERA_OK) { - DHLOGE("ColorConvertProcess : ColorConvertByColorFormat failed."); + if (ScaleConvert(srcImgInfo, dstImgInfo) != DCAMERA_OK) { + DHLOGE("ScaleConvertProcess : Scale convert failed."); return 
DCAMERA_BAD_OPERATE; } @@ -117,10 +163,10 @@ int32_t ColorFormatProcess::ProcessData(std::vector> std::vector> outputBuffers; outputBuffers.push_back(dstBuf); - return ColorFormatDone(outputBuffers); + return ConvertDone(outputBuffers); } -int32_t ColorFormatProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf) +int32_t ScaleConvertProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf) { if (imgBuf == nullptr) { DHLOGE("GetImageUnitInfo failed, imgBuf is nullptr."); @@ -134,6 +180,7 @@ int32_t ColorFormatProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std:: DHLOGE("GetImageUnitInfo failed, Videoformat is null."); return DCAMERA_NOT_FOUND; } + DHLOGI("cmh GetImageUnitInfo, colorFormat %d.", colorFormat); if (colorFormat != static_cast(Videoformat::YUVI420) && colorFormat != static_cast(Videoformat::NV12) && colorFormat != static_cast(Videoformat::NV21)) { @@ -158,38 +205,40 @@ int32_t ColorFormatProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std:: DHLOGE("Get the imgData of the imgBuf failed."); return DCAMERA_BAD_VALUE; } - DHLOGD("ColorFormatProcess imgBuf info : Videoformat %d, alignedWidth %d, alignedHeight %d, width %d, height %d," + - " chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.width, imgInfo.height, imgInfo.alignedWidth, - imgInfo.alignedHeight, imgInfo.chromaOffset, imgInfo.imgSize); + DHLOGD("ScaleConvertProcess imgBuf info : Videoformat %d, alignedWidth %d, alignedHeight %d, width %d, height %d" + + ", chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.alignedWidth, imgInfo.alignedHeight, + imgInfo.width, imgInfo.height, imgInfo.chromaOffset, imgInfo.imgSize); return DCAMERA_OK; } -bool ColorFormatProcess::CheckColorProcessInputInfo(const ImageUnitInfo& srcImgInfo) +bool ScaleConvertProcess::CheckScaleProcessInputInfo(const ImageUnitInfo& srcImgInfo) { return srcImgInfo.colorFormat == sourceConfig_.GetVideoformat() && - srcImgInfo.alignedWidth == 
sourceConfig_.GetWidth() && - srcImgInfo.alignedHeight == sourceConfig_.GetHeight() && + srcImgInfo.width == sourceConfig_.GetWidth() && + srcImgInfo.height == sourceConfig_.GetHeight() && IsCorrectImageUnitInfo(srcImgInfo); } -bool ColorFormatProcess::CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +bool ScaleConvertProcess::CheckScaleConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { if (srcImgInfo.imgData == nullptr || dstImgInfo.imgData == nullptr) { DHLOGE("The imgData of srcImgInfo or the imgData of dstImgInfo are null!"); return false; } - if (srcImgInfo.colorFormat != Videoformat::NV12 && dstImgInfo.colorFormat != Videoformat::NV21) { - DHLOGE("CopyInfo error : srcImgInfo colorFormat %d, dstImgInfo colorFormat %d.", - srcImgInfo.colorFormat, dstImgInfo.colorFormat); - return false; - } + DHLOGI("cmh CheckScaleConvertInfo srcImginfo: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " + + "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight, + srcImgInfo.chromaOffset, srcImgInfo.imgSize); if (!IsCorrectImageUnitInfo(srcImgInfo)) { DHLOGE("srcImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " + "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight, srcImgInfo.chromaOffset, srcImgInfo.imgSize); return false; } + + DHLOGI("cmh CheckScaleConvertInfo dstImginfo: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " + + "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, dstImgInfo.alignedHeight, + dstImgInfo.chromaOffset, dstImgInfo.imgSize); if (!IsCorrectImageUnitInfo(dstImgInfo)) { DHLOGE("dstImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " + "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, 
dstImgInfo.alignedHeight, @@ -197,273 +246,163 @@ bool ColorFormatProcess::CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, return false; } - if (dstImgInfo.width > srcImgInfo.alignedWidth || dstImgInfo.height > srcImgInfo.alignedHeight) { + if ((dstImgInfo.width == srcImgInfo.alignedWidth) && (dstImgInfo.height == srcImgInfo.alignedHeight) && + (dstImgInfo.colorFormat == srcImgInfo.colorFormat)) { DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.", dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight); return false; } + return true; } -bool ColorFormatProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo) +bool ScaleConvertProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo) { size_t expectedImgSize = static_cast(imgInfo.alignedWidth * imgInfo.alignedHeight * - YUV_BYTES_PER_PIXEL / Y2UV_RATIO); + YUV_BYTES_PER_PIXEL / Y2UV_RATIO); size_t expectedChromaOffset = static_cast(imgInfo.alignedWidth * imgInfo.alignedHeight); return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight && imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset); } -/** -* @brief Separate a row of srcUVPlane into half a row of dstUPlane and half a row of dstVPlane. For example, -* converts the UVPlane memory arrangement of NV12 to the UV memory arrangement of YUVI420. Note that the -* stride and width of the dstImage must be the same. 
-*/ -void ColorFormatProcess::SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane, - int32_t srcHalfWidth) -{ - int32_t memoryOffset0 = 0; - int32_t memoryOffset1 = 1; - int32_t memoryOffset2 = 2; - int32_t memoryOffset3 = 3; - int32_t perSeparatebytes = 4; - for (int32_t x = 0; x < srcHalfWidth - 1; x += memoryOffset2) { - dstUPlane[x] = srcUVPlane[memoryOffset0]; - dstUPlane[x + memoryOffset1] = srcUVPlane[memoryOffset2]; - dstVPlane[x] = srcUVPlane[memoryOffset1]; - dstVPlane[x + memoryOffset1] = srcUVPlane[memoryOffset3]; - srcUVPlane += perSeparatebytes; - } - if (static_cast(srcHalfWidth) & 1) { - dstUPlane[srcHalfWidth - 1] = srcUVPlane[memoryOffset0]; - dstVPlane[srcHalfWidth - 1] = srcUVPlane[memoryOffset1]; - } -} - -int32_t ColorFormatProcess::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ScaleConvertProcess::ScaleConvert(ImageUnitInfo& srcImgInfo, ImageUnitInfo& dstImgInfo) { - if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + DHLOGD("ScaleConvertProcess : Scale convert start."); + if (!CheckScaleConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ScaleConvertProcess : CheckScaleConvertInfo failed."); return DCAMERA_BAD_VALUE; } - uint8_t *srcUVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset; - int32_t srcUVStride = srcImgInfo.alignedWidth; - uint8_t *dstUPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset; - int32_t dstUStride = dstImgInfo.alignedWidth / Y2UV_RATIO; - uint8_t *dstVPlane = dstUPlane + (dstImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO; - int32_t dstVStride = dstImgInfo.alignedWidth / Y2UV_RATIO; - int32_t width = srcImgInfo.width / Y2UV_RATIO; - int32_t height = srcImgInfo.height / Y2UV_RATIO; - DHLOGD("srcUVStride %d, dstUStride %d, dstVStride %d, src half width %d, src half height %d.", - srcUVStride, dstUStride, dstVStride, width, height); - - /* Negative height means invert the 
image. */ - if (height < 0) { - height = -height; - dstUPlane = dstUPlane + (height - 1) * dstUStride; - dstVPlane = dstVPlane + (height - 1) * dstVStride; - dstUStride = -dstUStride; - dstVStride = -dstVStride; - } - /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */ - if (srcUVStride == width * Y2UV_RATIO && dstUStride == width && dstVStride == width) { - SeparateUVPlaneByRow(srcUVPlane, dstUPlane, dstVPlane, width * height); - return DCAMERA_OK; - } - /* Black borders exist in srcImage or dstImage. */ - for (int32_t y = 0; y < height; ++y) { - SeparateUVPlaneByRow(srcUVPlane, dstUPlane, dstVPlane, width); - dstUPlane += dstUStride; - dstVPlane += dstVStride; - srcUVPlane += srcUVStride; - } - return DCAMERA_OK; -} - -/** -* @brief Combine half a row of srcUPlane and half a row of srcVPlane into a row of dstUVPlane. For example, -* converts the UVPlane memory arrangement of YUVI420 to the UV memory arrangement of NV12. Note that the -* stride and width of the srcImage must be the same. 
-*/ -void ColorFormatProcess::CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint8_t *srcVPlane, uint8_t *dstUVPlane, - int32_t dstHalfWidth) -{ - int32_t memoryOffset0 = 0; - int32_t memoryOffset1 = 1; - int32_t memoryOffset2 = 2; - int32_t memoryOffset3 = 3; - int32_t perCombinebytes = 4; - for (int32_t x = 0; x < dstHalfWidth - 1; x += memoryOffset2) { - dstUVPlane[memoryOffset0] = srcUPlane[x]; - dstUVPlane[memoryOffset1] = srcVPlane[x]; - dstUVPlane[memoryOffset2] = srcUPlane[x + memoryOffset1]; - dstUVPlane[memoryOffset3] = srcVPlane[x + memoryOffset1]; - dstUVPlane += perCombinebytes; - } - if (static_cast(dstHalfWidth) & 1) { - dstUVPlane[memoryOffset0] = srcUPlane[dstHalfWidth - 1]; - dstUVPlane[memoryOffset1] = srcVPlane[dstHalfWidth - 1]; - } -} + DHLOGI("cmh ScaleConvert srcImgInfo.colorFormat is %d.", srcImgInfo.colorFormat); -int32_t ColorFormatProcess::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) -{ - if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); - return DCAMERA_BAD_VALUE; + std::lock_guard autoLock(scaleMutex_); + switch (GetAVPixelFormat(srcImgInfo.colorFormat)) { + case AV_PIX_FMT_YUV420P: { + int32_t ret = CopyYUV420SrcData(srcImgInfo); + if (ret != DCAMERA_OK) { + DHLOGE("ScaleConvertProcess::ScaleConvert copy yuv420p src data failed."); + return ret; + } + break; + } + case AV_PIX_FMT_NV12: { + int32_t ret = CopyNV12SrcData(srcImgInfo); + if (ret != DCAMERA_OK) { + DHLOGE("ScaleConvertProcess::ScaleConvert copy nv12 src data failed."); + return ret; + } + break; + } + case AV_PIX_FMT_NV21: { + int32_t ret = CopyNV21SrcData(srcImgInfo); + if (ret != DCAMERA_OK) { + DHLOGE("ScaleConvertProcess::ScaleConvert copy nv21 src data failed."); + return ret; + } + break; + } + default: + DHLOGE("Unknown pixel format not support."); + return DCAMERA_BAD_VALUE; } - uint8_t *srcVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset; - int32_t 
srcVStride = srcImgInfo.alignedWidth / Y2UV_RATIO; - uint8_t *srcUPlane = srcVPlane + (srcImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO; - int32_t srcUStride = srcImgInfo.alignedWidth / Y2UV_RATIO; - uint8_t *dstUVPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset; - int32_t dstUVStride = dstImgInfo.alignedWidth; - int32_t width = dstImgInfo.width / Y2UV_RATIO; - int32_t height = dstImgInfo.height / Y2UV_RATIO; - DHLOGD("srcUStride %d, srcVStride %d, dstUVStride %d, dst half width %d, dst half height %d.", - srcUStride, srcVStride, dstUVStride, width, height); - - /* Negative height means invert the image. */ - if (height < 0) { - height = -height; - dstUVPlane = dstUVPlane + (height - 1) * dstUVStride; - dstUVStride = -dstUVStride; - } - /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */ - if (srcUStride == width && srcVStride == width && dstUVStride == width * Y2UV_RATIO) { - CombineUVPlaneByRow(srcUPlane, srcVPlane, dstUVPlane, width * height); - return DCAMERA_OK; - } - /* Black borders exist in srcImage or dstImage. 
*/ - for (int32_t y = 0; y < height; ++y) { - CombineUVPlaneByRow(srcUPlane, srcVPlane, dstUVPlane, width); - srcUPlane += srcUStride; - srcVPlane += srcVStride; - dstUVPlane += dstUVStride; + sws_scale(swsContext_, static_cast(srcData_), srcLineSize_, 0, srcImgInfo.height, + dstData_, dstLineSize_); + int32_t ret = memcpy_s(dstImgInfo.imgData, dstImgInfo.imgSize, dstData_[0], dstBuffSize_); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::ScaleConvert copy dst image info failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } return DCAMERA_OK; } -int32_t ColorFormatProcess::CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ScaleConvertProcess::CopyYUV420SrcData(const ImageUnitInfo& srcImgInfo) { - if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); - return DCAMERA_BAD_VALUE; - } - - int32_t totalCopyYPlaneSize = dstImgInfo.alignedWidth * dstImgInfo.height; - if (srcImgInfo.alignedWidth == dstImgInfo.width && dstImgInfo.alignedWidth == dstImgInfo.width) { - /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */ - errno_t err = memcpy_s(dstImgInfo.imgData, totalCopyYPlaneSize, srcImgInfo.imgData, totalCopyYPlaneSize); - if (err != EOK) { - DHLOGE("ColorConvert : memcpy_s CopyYPlaner failed by Coalesce rows."); - return DCAMERA_MEMORY_OPT_ERROR; - } - } else { - /* Black borders exist in srcImage or dstImage. 
*/ - int32_t srcDataOffset = 0; - int32_t dstDataOffset = 0; - for (int32_t yh = 0; yh < dstImgInfo.height; yh++) { - errno_t err = memcpy_s(dstImgInfo.imgData + dstDataOffset, totalCopyYPlaneSize - dstDataOffset, - srcImgInfo.imgData + srcDataOffset, dstImgInfo.width); - if (err != EOK) { - DHLOGE("memcpy_s YPlane in line[%d] failed.", yh); - return DCAMERA_MEMORY_OPT_ERROR; - } - dstDataOffset += dstImgInfo.alignedWidth; - srcDataOffset += srcImgInfo.alignedWidth; - } - DHLOGD("ColorConvert :get valid yplane OK, srcImgInfo: alignedWidth %d, width %d, height %d. " + - "dstImgInfo: alignedWidth %d, width %d, height %d. dstDataOffset %d, srcDataOffset %d.", - srcImgInfo.alignedWidth, srcImgInfo.width, srcImgInfo.height, dstImgInfo.alignedWidth, - dstImgInfo.width, dstImgInfo.height, dstDataOffset, srcDataOffset); + DHLOGI("cmh ScaleConvertProcess::CopyYUV420SrcData start."); + int32_t ret = memcpy_s(srcData_[0], srcImgInfo.width * srcImgInfo.height, + srcImgInfo.imgData, srcImgInfo.width * srcImgInfo.height); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; + } + + ret = memcpy_s(srcData_[1], srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_YUV, + srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, + srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_YUV); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; + } + + ret = memcpy_s(srcData_[2], srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_YUV, + srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight + + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV, + srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_YUV); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } return DCAMERA_OK; } 
-int32_t ColorFormatProcess::ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ScaleConvertProcess::CopyNV12SrcData(const ImageUnitInfo& srcImgInfo) { - if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); - return DCAMERA_BAD_VALUE; + DHLOGI("cmh ScaleConvertProcess::CopyNV12SrcData start."); + int32_t ret = memcpy_s(srcData_[0], srcImgInfo.width * srcImgInfo.height, + srcImgInfo.imgData, srcImgInfo.width * srcImgInfo.height); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } - int32_t err = CopyYPlane(srcImgInfo, dstImgInfo); - if (err != DCAMERA_OK) { - DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed."); - return err; + ret = memcpy_s(srcData_[1], srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_NV, + srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, + srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_NV); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } - - std::shared_ptr tempPlaneYUV = std::make_shared(dstImgInfo.imgSize); - ImageUnitInfo tempImgInfo = dstImgInfo; - tempImgInfo.imgData = tempPlaneYUV->Data(); - SeparateNV12UVPlane(srcImgInfo, tempImgInfo); - CombineNV12UVPlane(tempImgInfo, dstImgInfo); return DCAMERA_OK; } -int32_t ColorFormatProcess::ColorConvertNV12ToI420(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ScaleConvertProcess::CopyNV21SrcData(const ImageUnitInfo& srcImgInfo) { - if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { - DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); - return DCAMERA_BAD_VALUE; + DHLOGI("cmh ScaleConvertProcess::CopyNV21SrcData start."); + int32_t ret = memcpy_s(srcData_[0], srcImgInfo.width * srcImgInfo.height, + srcImgInfo.imgData, srcImgInfo.width * 
srcImgInfo.height); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } - int32_t err = CopyYPlane(srcImgInfo, dstImgInfo); - if (err != DCAMERA_OK) { - DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed."); - return err; + ret = memcpy_s(srcData_[1], srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_NV, + srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight, + srcImgInfo.width * srcImgInfo.height / MEMORY_RATIO_NV); + if (ret != EOK) { + DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret); + return DCAMERA_MEMORY_OPT_ERROR; } - - SeparateNV12UVPlane(srcImgInfo, dstImgInfo); return DCAMERA_OK; } -int32_t ColorFormatProcess::ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) -{ - int32_t ret; - switch (srcImgInfo.colorFormat) { - case Videoformat::NV12: - switch (dstImgInfo.colorFormat) { - case Videoformat::NV21: - ret = ColorConvertNV12ToNV21(srcImgInfo, dstImgInfo); - break; - case Videoformat::YUVI420: - ret = ColorConvertNV12ToI420(srcImgInfo, dstImgInfo); - break; - default: - DHLOGE("Unsupport ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat); - return DCAMERA_BAD_OPERATE; - } - break; - case Videoformat::NV21: - case Videoformat::YUVI420: - case Videoformat::RGBA_8888: - DHLOGE("Unsupport ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat); - return DCAMERA_BAD_OPERATE; - } - return ret; -} - -int32_t ColorFormatProcess::ColorFormatDone(std::vector>& outputBuffers) +int32_t ScaleConvertProcess::ConvertDone(std::vector>& outputBuffers) { - DHLOGD("ColorFormat Done."); + DHLOGD("ScaleConvertProcess : Convert Done."); if (outputBuffers.empty()) { - DHLOGE("The received data buffers is empty."); + DHLOGE("The received data buffer is empty."); return DCAMERA_BAD_VALUE; } - + if (nextDataProcess_ != nullptr) { - DHLOGD("Send to the next node of 
the decoder for processing."); + DHLOGD("Send to the next node of the scale convert for processing."); int32_t err = nextDataProcess_->ProcessData(outputBuffers); if (err != DCAMERA_OK) { - DHLOGE("Someone node after the decoder processes failed."); + DHLOGE("Some node after the scale convert processes failed."); } return err; } - DHLOGD("The current node is the last node, and Output the processed video buffer"); + + DHLOGD("The current node is the last node, and output the processed video buffer."); std::shared_ptr targetPipelineSource = callbackPipelineSource_.lock(); if (targetPipelineSource == nullptr) { DHLOGE("callbackPipelineSource_ is nullptr."); @@ -473,7 +412,24 @@ int32_t ColorFormatProcess::ColorFormatDone(std::vector - -#define private public -#include "color_format_process.h" -#undef private -#include "distributed_camera_constants.h" -#include "distributed_camera_errno.h" - -using namespace testing::ext; - -namespace OHOS { -namespace DistributedHardware { -class ColorFormatProcessTest : public testing::Test { -public: - static void SetUpTestCase(void); - static void TearDownTestCase(void); - void SetUp(); - void TearDown(); - - std::shared_ptr testColorFmtProcess_; -}; - -namespace { -const int32_t TEST_WIDTH = 1920; -const int32_t TEST_HEIGTH = 1080; -const int32_t TEST_WIDTH2 = 640; -const int32_t TEST_HEIGTH2 = 480; -} - -void ColorFormatProcessTest::SetUpTestCase(void) -{ -} - -void ColorFormatProcessTest::TearDownTestCase(void) -{ -} - -void ColorFormatProcessTest::SetUp(void) -{ - std::shared_ptr sourcePipeline = std::make_shared(); - std::weak_ptr callbackPipelineSource(sourcePipeline); - testColorFmtProcess_ = std::make_shared(callbackPipelineSource); -} - -void ColorFormatProcessTest::TearDown(void) -{ - testColorFmtProcess_ = nullptr; -} - -/** - * @tc.name: color_format_process_test_001 - * @tc.desc: Verify color format process InitNode normal. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_001, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); -} - -/** - * @tc.name: color_format_process_test_002 - * @tc.desc: Verify color format process InitNode abnormal. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_002, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH2, - TEST_HEIGTH2); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_BAD_TYPE); -} - -/** - * @tc.name: color_format_process_test_003 - * @tc.desc: Verify color format process ProcessData inputBuffers is empty. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_003, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - std::vector> inputBuffers; - rc = testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_004 - * @tc.desc: Verify color format process ProcessData inputBuffers[0] is nullptr. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_004, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - std::vector> inputBuffers; - std::shared_ptr db; - inputBuffers.push_back(db); - rc = testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_005 - * @tc.desc: Verify color format process ProcessData find timeStamp failed. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_005, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - size_t capacity = 100; - std::vector> inputBuffers; - std::shared_ptr db = std::make_shared(capacity); - inputBuffers.push_back(db); - rc = testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_006 - * @tc.desc: Verify color format process ProcessData normal. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_006, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - size_t capacity = 3200000; - int64_t timeStamp = 10; - std::vector> inputBuffers; - std::shared_ptr db = std::make_shared(capacity); - db->SetInt64("timeUs", timeStamp); - db->SetInt32("Videoformat", static_cast(Videoformat::NV12)); - db->SetInt32("alignedWidth", TEST_WIDTH); - db->SetInt32("alignedHeight", TEST_HEIGTH); - db->SetInt32("width", TEST_WIDTH); - db->SetInt32("height", TEST_HEIGTH); - 
inputBuffers.push_back(db); - std::shared_ptr sourcePipeline = std::make_shared(); - testColorFmtProcess_->callbackPipelineSource_ = sourcePipeline; - testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_OK); -} - -/** - * @tc.name: color_format_process_test_007 - * @tc.desc: Verify color format process InitNode abnormal. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_007, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH2); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_BAD_TYPE); -} - -/** - * @tc.name: color_format_process_test_008 - * @tc.desc: Verify color format process InitNode abnormal. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_008, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH2, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_BAD_TYPE); -} - -/** - * @tc.name: color_format_process_test_009 - * @tc.desc: Verify color format process ProcessData. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_009, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - size_t capacity = 100; - std::vector> inputBuffers; - std::shared_ptr db = std::make_shared(capacity); - inputBuffers.push_back(db); - rc = testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_010 - * @tc.desc: Verify color format process ProcessData. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_010, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - size_t capacity = 100; - std::vector> inputBuffers; - std::shared_ptr db = std::make_shared(capacity); - int64_t timeStamp = 10; - db->SetInt64("timeUs", timeStamp); - inputBuffers.push_back(db); - rc = testColorFmtProcess_->ProcessData(inputBuffers); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_011 - * @tc.desc: Verify color format process GetImageUnitInfo. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_011, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr}; - std::shared_ptr imgBuf = nullptr; - int32_t rc = testColorFmtProcess_->GetImageUnitInfo(srcImgInfo, imgBuf); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); - - size_t capacity = 100; - imgBuf = std::make_shared(capacity); - rc = testColorFmtProcess_->GetImageUnitInfo(srcImgInfo, imgBuf); - EXPECT_EQ(rc, DCAMERA_NOT_FOUND); - - imgBuf->SetInt32("Videoformat", static_cast(Videoformat::RGBA_8888)); - rc = testColorFmtProcess_->GetImageUnitInfo(srcImgInfo, imgBuf); - EXPECT_EQ(rc, DCAMERA_NOT_FOUND); -} - -/** - * @tc.name: color_format_process_test_012 - * @tc.desc: Verify color format process GetImageUnitInfo. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_012, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr}; - size_t capacity = 100; - std::shared_ptr imgBuf = std::make_shared(capacity); - int64_t timeStamp = 10; - imgBuf->SetInt64("timeUs", timeStamp); - imgBuf->SetInt32("Videoformat", static_cast(Videoformat::NV12)); - imgBuf->SetInt32("alignedWidth", TEST_WIDTH); - imgBuf->SetInt32("width", TEST_WIDTH); - imgBuf->SetInt32("height", TEST_HEIGTH); - - int32_t rc = testColorFmtProcess_->GetImageUnitInfo(srcImgInfo, imgBuf); - EXPECT_EQ(rc, DCAMERA_NOT_FOUND); -} - -/** - * @tc.name: color_format_process_test_013 - * @tc.desc: Verify color format process GetImageUnitInfo. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_013, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr}; - size_t capacity = 100; - std::shared_ptr imgBuf = std::make_shared(capacity); - int64_t timeStamp = 10; - imgBuf->SetInt64("timeUs", timeStamp); - imgBuf->SetInt32("Videoformat", static_cast(Videoformat::NV12)); - imgBuf->SetInt32("alignedWidth", TEST_WIDTH); - imgBuf->SetInt32("alignedHeight", TEST_HEIGTH); - imgBuf->SetInt32("width", TEST_WIDTH); - imgBuf->SetInt32("height", TEST_HEIGTH); - - int32_t rc = testColorFmtProcess_->GetImageUnitInfo(srcImgInfo, imgBuf); - EXPECT_EQ(rc, DCAMERA_OK); -} - -/** - * @tc.name: color_format_process_test_014 - * @tc.desc: Verify color format process CheckColorProcessInputInfo. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_014, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - VideoConfigParams srcParams(VideoCodecType::CODEC_H264, - Videoformat::NV12, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams destParams(VideoCodecType::CODEC_H264, - Videoformat::NV21, - DCAMERA_PRODUCER_FPS_DEFAULT, - TEST_WIDTH, - TEST_HEIGTH); - VideoConfigParams procConfig; - int32_t rc = testColorFmtProcess_->InitNode(srcParams, destParams, procConfig); - EXPECT_EQ(rc, DCAMERA_OK); - - ImageUnitInfo srcImgInfo {Videoformat::NV12, TEST_WIDTH, TEST_HEIGTH, 0, 0, 0, 0, nullptr}; - bool ret = testColorFmtProcess_->CheckColorProcessInputInfo(srcImgInfo); - EXPECT_EQ(ret, false); -} - -/** - * @tc.name: color_format_process_test_015 - * @tc.desc: Verify color format process func. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_015, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - ImageUnitInfo srcImgInfo {Videoformat::NV12, TEST_WIDTH, TEST_HEIGTH, 0, 0, 0, 0, nullptr}; - ImageUnitInfo dstImgInfo {Videoformat::NV12, TEST_WIDTH, TEST_HEIGTH, 0, 0, 0, 0, nullptr}; - bool ret = testColorFmtProcess_->CheckColorConvertInfo(srcImgInfo, dstImgInfo); - EXPECT_EQ(ret, false); - - int32_t rc = testColorFmtProcess_->CopyYPlane(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); - - rc = testColorFmtProcess_->SeparateNV12UVPlane(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); - - rc = testColorFmtProcess_->ColorConvertNV12ToI420(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_016 - * @tc.desc: Verify color format process func. - * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_016, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - uint8_t *imgData = new uint8_t(10); - ImageUnitInfo srcImgInfo {Videoformat::NV12, TEST_WIDTH, TEST_HEIGTH, 0, 0, 0, 0, imgData}; - ImageUnitInfo dstImgInfo {Videoformat::YUVI420, TEST_WIDTH2, TEST_HEIGTH2, 0, 0, 0, 0, imgData}; - int32_t rc = testColorFmtProcess_->CopyYPlane(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); - - rc = testColorFmtProcess_->SeparateNV12UVPlane(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); - - rc = testColorFmtProcess_->ColorConvertNV12ToI420(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_VALUE); -} - -/** - * @tc.name: color_format_process_test_017 - * @tc.desc: Verify color format process func. 
- * @tc.type: FUNC - * @tc.require: Issue Number - */ -HWTEST_F(ColorFormatProcessTest, color_format_process_test_017, TestSize.Level1) -{ - EXPECT_EQ(false, testColorFmtProcess_ == nullptr); - - uint8_t *imgData = new uint8_t(10); - ImageUnitInfo srcImgInfo {Videoformat::NV12, TEST_WIDTH, TEST_HEIGTH, 0, 0, 0, 0, imgData}; - ImageUnitInfo dstImgInfo {Videoformat::RGBA_8888, TEST_WIDTH2, TEST_HEIGTH2, 0, 0, 0, 0, imgData}; - int32_t rc = testColorFmtProcess_->ColorConvertByColorFormat(srcImgInfo, dstImgInfo); - EXPECT_EQ(rc, DCAMERA_BAD_OPERATE); -} -} // namespace DistributedHardware -} // namespace OHOS diff --git a/services/data_process/test/unittest/common/pipeline_node/scale_convert_process_test.cpp b/services/data_process/test/unittest/common/pipeline_node/scale_convert_process_test.cpp index 4b01728e8c7d644bb8d594bd83d9675d4d443d8b..3e2222e80a6b51a8c7ebf31a705cd68c490a0b6a 100644 --- a/services/data_process/test/unittest/common/pipeline_node/scale_convert_process_test.cpp +++ b/services/data_process/test/unittest/common/pipeline_node/scale_convert_process_test.cpp @@ -61,6 +61,7 @@ void ScaleConvertProcessTest::TearDown(void) testScaleConvertProcess_ = nullptr; } +#ifdef DCAMERA_COMMON /** * @tc.name: scale_convert_process_test_001 * @tc.desc: Verify scale convert process InitNode IsConvertible true. @@ -467,5 +468,6 @@ HWTEST_F(ScaleConvertProcessTest, scale_convert_process_test_016, TestSize.Level int32_t rc = testScaleConvertProcess_->ConvertDone(outputBuffers); EXPECT_EQ(rc, DCAMERA_BAD_VALUE); } +#endif } // namespace DistributedHardware } // namespace OHOS