diff --git a/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_source_data_process.h b/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_source_data_process.h
index 45e60497ebd2b7a4605983d8400e973dd6956baf..1087e3f10b9e9d4e15d1e4e40cc82db0095c9ad3 100644
--- a/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_source_data_process.h
+++ b/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_source_data_process.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021 Huawei Device Co., Ltd.
+ * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -41,6 +41,9 @@ public:
     int32_t GetProducerSize() override;
     void GetAllStreamIds(std::vector<int32_t>& streamIds) override;
 
+private:
+    void DestroyPipeline();
+
 private:
     std::mutex streamMutex_;
     std::vector<std::shared_ptr<DCameraStreamDataProcess>> streamProcess_;
diff --git a/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_stream_data_process.h b/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_stream_data_process.h
index 10bd2210e325b95d566d3766363067a925e1ea3f..a8d42c0aa2e67cd97305eb939b216b5b14c12b35 100644
--- a/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_stream_data_process.h
+++ b/services/cameraservice/sourceservice/include/distributedcameramgr/dcameradata/dcamera_stream_data_process.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021 Huawei Device Co., Ltd.
+ * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -44,12 +44,12 @@ public:
     void OnProcessedVideoBuffer(const std::shared_ptr<DataBuffer>& videoResult);
     void OnError(DataProcessErrorType errorType);
 
+    void DestroyPipeline();
 private:
     void FeedStreamToSnapShot(const std::shared_ptr<DataBuffer>& buffer);
     void FeedStreamToContinue(const std::shared_ptr<DataBuffer>& buffer);
 
     void CreatePipeline();
-    void DestroyPipeline();
     VideoCodecType GetPipelineCodecType(DCEncodeType encodeType);
     Videoformat GetPipelineFormat(int32_t format);
 
diff --git a/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_source_data_process.cpp b/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_source_data_process.cpp
index e9e5974922fd37bbb028aafbe2a3ff53ecd18382..e8cf1e2db54210e1d7412686a1183035fd7c0ccc 100644
--- a/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_source_data_process.cpp
+++ b/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_source_data_process.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021 Huawei Device Co., Ltd.
+ * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -171,9 +171,21 @@ int32_t DCameraSourceDataProcess::StopCapture(std::vector<int32_t>& streamIds)
     for (auto iter = streamProcess_.begin(); iter != streamProcess_.end(); iter++) {
         (*iter)->StopCapture(streamIdSet);
     }
+    if ((streamType_ == CONTINUOUS_FRAME) && (GetProducerSize() == 0)) {
+        DestroyPipeline();
+    }
     return DCAMERA_OK;
 }
 
+void DCameraSourceDataProcess::DestroyPipeline()
+{
+    DHLOGI("DCameraSourceDataProcess DestroyPipeline devId %s dhId %s streamType: %d", GetAnonyString(devId_).c_str(),
+        GetAnonyString(dhId_).c_str(), streamType_);
+    for (auto iter = streamProcess_.begin(); iter != streamProcess_.end(); iter++) {
+        (*iter)->DestroyPipeline();
+    }
+}
+
 int32_t DCameraSourceDataProcess::GetProducerSize()
 {
     int32_t ret = 0;
diff --git a/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_stream_data_process.cpp b/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_stream_data_process.cpp
index 620dd69655b8e835aa808ef298db2faffbb86e75..8029e97e4dd3284dd03c365b2bff477c3cb8020c 100644
--- a/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_stream_data_process.cpp
+++ b/services/cameraservice/sourceservice/src/distributedcameramgr/dcameradata/dcamera_stream_data_process.cpp
@@ -47,8 +47,10 @@ DCameraStreamDataProcess::~DCameraStreamDataProcess()
 
 void DCameraStreamDataProcess::FeedStream(std::shared_ptr<DataBuffer>& buffer)
 {
-    DHLOGD("DCameraStreamDataProcess FeedStream devId %s dhId %s streamType %d streamSize: %d",
-        GetAnonyString(devId_).c_str(), GetAnonyString(dhId_).c_str(), streamType_, buffer->Size());
+    for (auto streamId : streamIds_) {
+        DHLOGD("DCameraStreamDataProcess FeedStream devId %s dhId %s streamId %d streamType %d streamSize: %d",
+            GetAnonyString(devId_).c_str(), GetAnonyString(dhId_).c_str(), streamId, streamType_, buffer->Size());
+    }
     switch (streamType_) {
         case SNAPSHOT_FRAME: {
             FeedStreamToSnapShot(buffer);
@@ -66,16 +68,22 @@ void DCameraStreamDataProcess::FeedStream(std::shared_ptr<DataBuffer>& buffer)
 
 void DCameraStreamDataProcess::ConfigStreams(std::shared_ptr<DCStreamInfo>& dstConfig, std::set<int32_t>& streamIds)
 {
-    DHLOGI("DCameraStreamDataProcess ConfigStreams devId %s dhId %s streamType: %d",
-        GetAnonyString(devId_).c_str(), GetAnonyString(dhId_).c_str(), streamType_);
+    for (auto streamId : streamIds) {
+        DHLOGI("DCameraStreamDataProcess ConfigStreams devId %s dhId %s streamId %d, width: %d, height: %d, "
+            "format: %d, dataspace: %d, encodeType: %d, streamType: %d", GetAnonyString(devId_).c_str(),
+            GetAnonyString(dhId_).c_str(), streamId, dstConfig->width_, dstConfig->height_, dstConfig->format_,
+            dstConfig->dataspace_, dstConfig->encodeType_, dstConfig->type_);
+    }
     dstConfig_ = dstConfig;
     streamIds_ = streamIds;
 }
 
 void DCameraStreamDataProcess::ReleaseStreams(std::set<int32_t>& streamIds)
 {
-    DHLOGI("DCameraStreamDataProcess ReleaseStreams devId %s dhId %s streamType: %d",
-        GetAnonyString(devId_).c_str(), GetAnonyString(dhId_).c_str(), streamType_);
+    for (auto streamId : streamIds) {
+        DHLOGI("DCameraStreamDataProcess ReleaseStreams devId %s dhId %s streamId %d streamType %d",
+            GetAnonyString(devId_).c_str(), GetAnonyString(dhId_).c_str(), streamId, streamType_);
+    }
     std::lock_guard<std::mutex> autoLock(producerMutex_);
     for (auto iter = streamIds.begin(); iter != streamIds.end(); iter++) {
         int32_t streamId = *iter;
@@ -109,6 +117,7 @@ void DCameraStreamDataProcess::StartCapture(std::shared_ptr
     std::lock_guard<std::mutex> autoLock(producerMutex_);
     for (auto iter = streamIds_.begin(); iter != streamIds_.end(); iter++) {
         uint32_t streamId = *iter;
+        DHLOGI("DCameraStreamDataProcess StartCapture streamId: %d", streamId);
         if (streamIds.find(streamId) == streamIds.end()) {
             continue;
         }
@@ -138,6 +147,7 @@ void DCameraStreamDataProcess::StopCapture(std::set<int32_t>& streamIds)
     std::lock_guard<std::mutex> autoLock(producerMutex_);
     for (auto iter = streamIds_.begin(); iter != streamIds_.end(); iter++) {
         uint32_t streamId = *iter;
+        DHLOGI("DCameraStreamDataProcess StopCapture streamId: %d", streamId);
         if (streamIds.find(streamId) == streamIds.end()) {
             continue;
         }
@@ -156,9 +166,6 @@ void DCameraStreamDataProcess::StopCapture(std::set<int32_t>& streamIds)
             producerIter = producers_.erase(producerIter);
         }
     }
-    if (streamType_ == CONTINUOUS_FRAME && producers_.empty()) {
-        DestroyPipeline();
-    }
 }
 
 void DCameraStreamDataProcess::GetAllStreamIds(std::set<int32_t>& streamIds)
diff --git a/services/data_process/BUILD.gn b/services/data_process/BUILD.gn
index 524f581905e9521790c6fbaef4c6ed7035974526..72e5e377d5af72c9746bc1171a4c89efb74b1c65 100644
--- a/services/data_process/BUILD.gn
+++ b/services/data_process/BUILD.gn
@@ -18,6 +18,7 @@ import(
 
 ohos_shared_library("distributed_camera_data_process") {
   include_dirs = [
+    "//third_party/ffmpeg/",
    "//utils/native/base/include",
    "//utils/system/safwk/native/include",
    "//foundation/graphic/graphic_2d/interfaces/innerkits/common",
@@ -40,6 +41,7 @@ ohos_shared_library("distributed_camera_data_process") {
    "include/pipeline_node/multimedia_codec/encoder",
    "include/pipeline_node/colorspace_conversion",
    "include/pipeline_node/fpscontroller",
+    "include/pipeline_node/scale_conversion",
    "${common_path}/include/constants",
    "${common_path}/include/utils",
    "${innerkits_path}/native_cpp/camera_source/include",
@@ -54,6 +56,7 @@ ohos_shared_library("distributed_camera_data_process") {
    "src/pipeline_node/multimedia_codec/decoder/decode_surface_listener.cpp",
    "src/pipeline_node/multimedia_codec/decoder/decode_video_callback.cpp",
    "src/pipeline_node/multimedia_codec/encoder/encode_video_callback.cpp",
+    "src/pipeline_node/scale_conversion/scale_convert_process.cpp",
    "src/utils/image_common_type.cpp",
  ]
 
@@ -89,6 +92,8 @@ ohos_shared_library("distributed_camera_data_process") {
    "multimedia_player_framework:media_client",
  ]
 
+  public_deps = [ "//third_party/ffmpeg:libohosffmpeg" ]
+
  subsystem_name = "distributedhardware"
 
  part_name = "distributed_camera"
diff --git a/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h b/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h
new file mode 100644
index 0000000000000000000000000000000000000000..922f968bb5d76c5cf29f33c8ac0c9dae03db29fd
--- /dev/null
+++ b/services/data_process/include/pipeline_node/scale_conversion/scale_convert_process.h
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OHOS_SCALE_CONVERT_PROCESS_H
+#define OHOS_SCALE_CONVERT_PROCESS_H
+
+#include "abstract_data_process.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include 
+#include 
+#include 
+#include 
+#ifdef __cplusplus
+};
+#endif
+
+#include 
+#include 
+
+#include "dcamera_pipeline_source.h"
+#include "image_common_type.h"
+
+namespace OHOS {
+namespace DistributedHardware {
+class ScaleConvertProcess : public AbstractDataProcess {
+public:
+    explicit ScaleConvertProcess(const std::weak_ptr<DCameraPipelineSource>& callbackPipeSource)
+        : callbackPipelineSource_(callbackPipeSource) {}
+    ~ScaleConvertProcess();
+
+    int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
+        VideoConfigParams& processedConfig) override;
+    int32_t ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers) override;
+    void ReleaseProcessNode() override;
+
+private:
+    bool IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig);
+    bool IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo);
+    bool CheckScaleProcessInputInfo(const ImageUnitInfo& srcImgInfo);
+    bool CheckScaleConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo);
+    int32_t GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr<DataBuffer>& imgBuf);
+    int32_t ScaleConvert(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo);
+    int32_t CopyYUV420SrcData(const ImageUnitInfo& srcImgInfo);
+    int32_t CopyNV12SrcData(const ImageUnitInfo& srcImgInfo);
+    int32_t CopyNV21SrcData(const ImageUnitInfo& srcImgInfo);
+    int32_t ConvertDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers);
+    AVPixelFormat GetAVPixelFormat(Videoformat colorFormat);
+
+private:
+    constexpr static int32_t DATA_LEN = 4;
+    constexpr static int32_t MEMORY_RATIO_NV = 2;
+    constexpr static int32_t MEMORY_RATIO_YUV = 4;
+    constexpr static int32_t SOURCE_ALIGN = 16;
+    constexpr static int32_t TARGET_ALIGN = 1;
+    constexpr static int32_t YUV_BYTES_PER_PIXEL = 3;
+    constexpr static int32_t Y2UV_RATIO = 2;
+
+    uint8_t *srcData_[DATA_LEN];
+    uint8_t *dstData_[DATA_LEN];
+    int32_t srcLineSize_[DATA_LEN];
+    int32_t dstLineSize_[DATA_LEN];
+    int32_t dstBuffSize_;
+    SwsContext *swsContext_ = nullptr;
+    VideoConfigParams sourceConfig_;
+    VideoConfigParams targetConfig_;
+    VideoConfigParams processedConfig_;
+    std::weak_ptr<DCameraPipelineSource> callbackPipelineSource_;
+    std::atomic<bool> isScaleConvert_ = false;
+    std::mutex scaleMutex_;
+};
+} // namespace DistributedHardware
+} // namespace OHOS
+#endif // OHOS_SCALE_CONVERT_PROCESS_H
diff --git a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp
index 11abb907ef005913c15ca557f096f6deaad5dab3..d571a5653c9aa559ba4de8eb659276424b5d8868 100644
--- a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp
+++ b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp
@@ -21,6 +21,7 @@
 #include "color_format_process.h"
 #include "decode_data_process.h"
 #include "fps_controller_process.h"
+#include "scale_convert_process.h"
 
 namespace OHOS {
 namespace DistributedHardware {
@@ -102,6 +103,7 @@ int32_t DCameraPipelineSource::InitDCameraPipNodes(const VideoConfigParams& sour
     }
 
     pipNodeRanks_.push_back(std::make_shared<DecodeDataProcess>(eventBusSource_, shared_from_this()));
+    pipNodeRanks_.push_back(std::make_shared<ScaleConvertProcess>(shared_from_this()));
     pipNodeRanks_.push_back(std::make_shared<ColorFormatProcess>(shared_from_this()));
     if (pipNodeRanks_.size() == 0) {
         DHLOGD("Creating an empty source pipeline.");
@@ -112,6 +114,9 @@ int32_t DCameraPipelineSource::InitDCameraPipNodes(const VideoConfigParams& sour
     VideoConfigParams curNodeSourceCfg = sourceConfig;
     for (size_t i = 0; i < pipNodeRanks_.size(); i++) {
         pipNodeRanks_[i]->SetNodeRank(i);
+        DHLOGI("DCameraPipelineSource::InitDCameraPipNodes Node %d Source Config: width %d height %d "
+            "format %d codecType %d frameRate %d", i, curNodeSourceCfg.GetWidth(), curNodeSourceCfg.GetHeight(),
+            curNodeSourceCfg.GetVideoformat(), curNodeSourceCfg.GetVideoCodecType(), curNodeSourceCfg.GetFrameRate());
         VideoConfigParams curNodeProcessedCfg;
         int32_t err = pipNodeRanks_[i]->InitNode(curNodeSourceCfg, targetConfig, curNodeProcessedCfg);
 
@@ -131,7 +136,9 @@ int32_t DCameraPipelineSource::InitDCameraPipNodes(const VideoConfigParams& sour
             return DCAMERA_INIT_ERR;
         }
     }
-    DHLOGD("All nodes have been linked in source pipeline.");
+    DHLOGD("All nodes have been linked in source pipeline, Target Config: "
+        "width %d height %d format %d codecType %d frameRate %d", targetConfig.GetWidth(), targetConfig.GetHeight(),
+        targetConfig.GetVideoformat(), targetConfig.GetVideoCodecType(), targetConfig.GetFrameRate());
     pipelineHead_ = pipNodeRanks_[0];
     return DCAMERA_OK;
 }
diff --git a/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..19f410313248a2a1b0d782689a4ee64600624b08
--- /dev/null
+++ b/services/data_process/src/pipeline_node/scale_conversion/scale_convert_process.cpp
@@ -0,0 +1,402 @@
+/*
+ * Copyright (c) 2022 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "scale_convert_process.h"
+
+#include "distributed_hardware_log.h"
+#include "distributed_camera_errno.h"
+
+namespace OHOS {
+namespace DistributedHardware {
+ScaleConvertProcess::~ScaleConvertProcess()
+{
+    if (isScaleConvert_.load()) {
+        DHLOGI("~ScaleConvertProcess : ReleaseProcessNode");
+        ReleaseProcessNode();
+    }
+}
+
+int32_t ScaleConvertProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
+    VideoConfigParams& processedConfig)
+{
+    DHLOGI("ScaleConvertProcess : InitNode.");
+    sourceConfig_ = sourceConfig;
+    targetConfig_ = targetConfig;
+    processedConfig_ = sourceConfig;
+    processedConfig_.SetWidthAndHeight(targetConfig.GetWidth(), targetConfig.GetHeight());
+    processedConfig = processedConfig_;
+
+    if (!IsConvertible(sourceConfig, targetConfig)) {
+        DHLOGI("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.",
+            sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(),
+            targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight());
+        isScaleConvert_.store(true);
+        return DCAMERA_OK;
+    }
+
+    int32_t ret = av_image_alloc(srcData_, srcLineSize_, sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
+        GetAVPixelFormat(sourceConfig_.GetVideoformat()), SOURCE_ALIGN);
+    if (ret < DCAMERA_OK) {
+        DHLOGE("Could not allocate source image.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    dstBuffSize_ = av_image_alloc(dstData_, dstLineSize_, processedConfig_.GetWidth(), processedConfig_.GetHeight(),
+        GetAVPixelFormat(processedConfig_.GetVideoformat()), TARGET_ALIGN);
+    if (dstBuffSize_ < DCAMERA_OK) {
+        DHLOGE("Could not allocate destination image.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    swsContext_ = sws_getContext(sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
+        GetAVPixelFormat(sourceConfig_.GetVideoformat()), processedConfig_.GetWidth(), processedConfig_.GetHeight(),
+        GetAVPixelFormat(processedConfig_.GetVideoformat()), SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
+    if (swsContext_ == nullptr) {
+        DHLOGE("Create SwsContext failed.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    isScaleConvert_.store(true);
+    return DCAMERA_OK;
+}
+
+bool ScaleConvertProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
+{
+    return (sourceConfig_.GetWidth() != targetConfig.GetWidth()) ||
+        (sourceConfig_.GetHeight() != targetConfig.GetHeight());
+}
+
+void ScaleConvertProcess::ReleaseProcessNode()
+{
+    DHLOGI("Start release [%d] node : ScaleConvertNode.", nodeRank_);
+    isScaleConvert_.store(false);
+    if (nextDataProcess_ != nullptr) {
+        nextDataProcess_->ReleaseProcessNode();
+        nextDataProcess_ = nullptr;
+    }
+
+    std::lock_guard<std::mutex> autoLock(scaleMutex_);
+    if (swsContext_ != nullptr) {
+        av_freep(&srcData_[0]);
+        av_freep(&dstData_[0]);
+        sws_freeContext(swsContext_);
+        swsContext_ = nullptr;
+    }
+}
+
+int32_t ScaleConvertProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
+{
+    DHLOGD("Process data in ScaleConvertProcess.");
+    if (!isScaleConvert_.load()) {
+        DHLOGE("Scale Convert node occurred error or start release.");
+        return DCAMERA_DISABLE_PROCESS;
+    }
+
+    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
+        DHLOGE("The input data buffers is empty.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    if (!IsConvertible(sourceConfig_, processedConfig_)) {
+        DHLOGD("The target resolution: %dx%d is the same as the source resolution: %dx%d",
+            processedConfig_.GetWidth(), processedConfig_.GetHeight(),
+            sourceConfig_.GetWidth(), sourceConfig_.GetHeight());
+        return ConvertDone(inputBuffers);
+    }
+
+    int64_t timeStamp = 0;
+    if (!(inputBuffers[0]->FindInt64("timeUs", timeStamp))) {
+        DHLOGE("ScaleConvertProcess : Find inputBuffer timeStamp failed.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr};
+    if ((GetImageUnitInfo(srcImgInfo, inputBuffers[0]) != DCAMERA_OK) || !CheckScaleProcessInputInfo(srcImgInfo)) {
+        DHLOGE("ScaleConvertProcess : srcImgInfo error.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    std::shared_ptr<DataBuffer> dstBuf = std::make_shared<DataBuffer>(dstBuffSize_);
+    ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(),
+        processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(),
+        processedConfig_.GetWidth() * processedConfig_.GetHeight(), dstBuf->Size(), dstBuf->Data() };
+    if (ScaleConvert(srcImgInfo, dstImgInfo) != DCAMERA_OK) {
+        DHLOGE("ScaleConvertProcess : Scale convert failed.");
+        return DCAMERA_BAD_OPERATE;
+    }
+
+    dstBuf->SetInt64("timeUs", timeStamp);
+    dstBuf->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
+    dstBuf->SetInt32("alignedWidth", processedConfig_.GetWidth());
+    dstBuf->SetInt32("alignedHeight", processedConfig_.GetHeight());
+    dstBuf->SetInt32("width", processedConfig_.GetWidth());
+    dstBuf->SetInt32("height", processedConfig_.GetHeight());
+
+    std::vector<std::shared_ptr<DataBuffer>> outputBuffers;
+    outputBuffers.push_back(dstBuf);
+    return ConvertDone(outputBuffers);
+}
+
+int32_t ScaleConvertProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr<DataBuffer>& imgBuf)
+{
+    if (imgBuf == nullptr) {
+        DHLOGE("GetImageUnitInfo failed, imgBuf is nullptr.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    bool findErr = true;
+    int32_t colorFormat = 0;
+    findErr = findErr && imgBuf->FindInt32("Videoformat", colorFormat);
+    if (!findErr) {
+        DHLOGE("GetImageUnitInfo failed, Videoformat is null.");
+        return DCAMERA_NOT_FOUND;
+    }
+    if (colorFormat != static_cast<int32_t>(Videoformat::YUVI420) &&
+        colorFormat != static_cast<int32_t>(Videoformat::NV12) &&
+        colorFormat != static_cast<int32_t>(Videoformat::NV21)) {
+        DHLOGE("GetImageUnitInfo failed, colorFormat %d are not supported.", colorFormat);
+        return DCAMERA_NOT_FOUND;
+    }
+    imgInfo.colorFormat = static_cast<Videoformat>(colorFormat);
+    findErr = findErr && imgBuf->FindInt32("width", imgInfo.width);
+    findErr = findErr && imgBuf->FindInt32("height", imgInfo.height);
+    findErr = findErr && imgBuf->FindInt32("alignedWidth", imgInfo.alignedWidth);
+    findErr = findErr && imgBuf->FindInt32("alignedHeight", imgInfo.alignedHeight);
+    if (!findErr) {
+        DHLOGE("GetImageUnitInfo failed, width %d, height %d, alignedWidth %d, alignedHeight %d.",
+            imgInfo.width, imgInfo.height, imgInfo.alignedWidth, imgInfo.alignedHeight);
+        return DCAMERA_NOT_FOUND;
+    }
+
+    imgInfo.chromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
+    imgInfo.imgSize = imgBuf->Size();
+    imgInfo.imgData = imgBuf->Data();
+    if (imgInfo.imgData == nullptr) {
+        DHLOGE("Get the imgData of the imgBuf failed.");
+        return DCAMERA_BAD_VALUE;
+    }
+    DHLOGD("ScaleConvertProcess imgBuf info : Videoformat %d, alignedWidth %d, alignedHeight %d, width %d, height %d"
+        ", chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.width, imgInfo.height, imgInfo.alignedWidth,
+        imgInfo.alignedHeight, imgInfo.chromaOffset, imgInfo.imgSize);
+    return DCAMERA_OK;
+}
+
+bool ScaleConvertProcess::CheckScaleProcessInputInfo(const ImageUnitInfo& srcImgInfo)
+{
+    return srcImgInfo.colorFormat == sourceConfig_.GetVideoformat() &&
+        srcImgInfo.alignedWidth == sourceConfig_.GetWidth() &&
+        srcImgInfo.alignedHeight == sourceConfig_.GetHeight() &&
+        IsCorrectImageUnitInfo(srcImgInfo);
+}
+
+bool ScaleConvertProcess::CheckScaleConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
+{
+    if (srcImgInfo.imgData == nullptr || dstImgInfo.imgData == nullptr) {
+        DHLOGE("The imgData of srcImgInfo or the imgData of dstImgInfo are null!");
+        return false;
+    }
+
+    if (!IsCorrectImageUnitInfo(srcImgInfo)) {
+        DHLOGE("srcImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
+            "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight,
+            srcImgInfo.chromaOffset, srcImgInfo.imgSize);
+        return false;
+    }
+
+    if (!IsCorrectImageUnitInfo(dstImgInfo)) {
+        DHLOGE("dstImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
+            "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, dstImgInfo.alignedHeight,
+            dstImgInfo.chromaOffset, dstImgInfo.imgSize);
+        return false;
+    }
+
+    if ((dstImgInfo.width == srcImgInfo.alignedWidth) && (dstImgInfo.height == srcImgInfo.alignedHeight)) {
+        DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.",
+            dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight);
+        return false;
+    }
+
+    return true;
+}
+
+bool ScaleConvertProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo)
+{
+    size_t expectedImgSize = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight *
+        YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
+    size_t expectedChromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
+    return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight &&
+        imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset);
+}
+
+int32_t ScaleConvertProcess::ScaleConvert(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
+{
+    DHLOGD("ScaleConvertProcess : Scale convert start.");
+    if (!CheckScaleConvertInfo(srcImgInfo, dstImgInfo)) {
+        DHLOGE("ScaleConvertProcess : CheckScaleConvertInfo failed.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    std::lock_guard<std::mutex> autoLock(scaleMutex_);
+    switch (GetAVPixelFormat(srcImgInfo.colorFormat)) {
+        case AV_PIX_FMT_YUV420P: {
+            int32_t ret = CopyYUV420SrcData(srcImgInfo);
+            if (ret != DCAMERA_OK) {
+                DHLOGE("ScaleConvertProcess::ScaleConvert copy yuv420p src data failed.");
+                return ret;
+            }
+            break;
+        }
+        case AV_PIX_FMT_NV12: {
+            int32_t ret = CopyNV12SrcData(srcImgInfo);
+            if (ret != DCAMERA_OK) {
+                DHLOGE("ScaleConvertProcess::ScaleConvert copy nv12 src data failed.");
+                return ret;
+            }
+            break;
+        }
+        case AV_PIX_FMT_NV21: {
+            int32_t ret = CopyNV21SrcData(srcImgInfo);
+            if (ret != DCAMERA_OK) {
+                DHLOGE("ScaleConvertProcess::ScaleConvert copy nv21 src data failed.");
+                return ret;
+            }
+            break;
+        }
+        default:
+            DHLOGE("Unknown pixel format not support.");
+            return DCAMERA_BAD_VALUE;
+    }
+
+    sws_scale(swsContext_, (const uint8_t * const *)srcData_, srcLineSize_, 0, srcImgInfo.alignedHeight,
+        dstData_, dstLineSize_);
+    int32_t ret = memcpy_s(dstImgInfo.imgData, dstImgInfo.imgSize, dstData_[0], dstBuffSize_);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::ScaleConvert copy dst image info failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+    return DCAMERA_OK;
+}
+
+int32_t ScaleConvertProcess::CopyYUV420SrcData(const ImageUnitInfo& srcImgInfo)
+{
+    int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+
+    ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV,
+        srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+
+    ret = memcpy_s(srcData_[2], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV,
+        srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight +
+        srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV,
+        srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_YUV);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyYUV420SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+    return DCAMERA_OK;
+}
+
+int32_t ScaleConvertProcess::CopyNV12SrcData(const ImageUnitInfo& srcImgInfo)
+{
+    int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+
+    ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV,
+        srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyNV12SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+    return DCAMERA_OK;
+}
+
+int32_t ScaleConvertProcess::CopyNV21SrcData(const ImageUnitInfo& srcImgInfo)
+{
+    int32_t ret = memcpy_s(srcData_[0], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.imgData, srcImgInfo.alignedWidth * srcImgInfo.alignedHeight);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+
+    ret = memcpy_s(srcData_[1], srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV,
+        srcImgInfo.imgData + srcImgInfo.alignedWidth * srcImgInfo.alignedHeight,
+        srcImgInfo.alignedWidth * srcImgInfo.alignedHeight / MEMORY_RATIO_NV);
+    if (ret != EOK) {
+        DHLOGE("ScaleConvertProcess::CopyNV21SrcData memory copy failed, ret = %d", ret);
+        return DCAMERA_MEMORY_OPT_ERROR;
+    }
+    return DCAMERA_OK;
+}
+
+int32_t ScaleConvertProcess::ConvertDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
+{
+    DHLOGD("ScaleConvertProcess : Convert Done.");
+    if (outputBuffers.empty()) {
+        DHLOGE("The received data buffer is empty.");
+        return DCAMERA_BAD_VALUE;
+    }
+
+    if (nextDataProcess_ != nullptr) {
+        DHLOGD("Send to the next node of the scale convert for processing.");
+        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
+        if (err != DCAMERA_OK) {
+            DHLOGE("Some node after the scale convert processes failed.");
+        }
+        return err;
+    }
+
+    DHLOGD("The current node is the last node, and output the processed video buffer.");
+    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
+    if (targetPipelineSource == nullptr) {
+        DHLOGE("callbackPipelineSource_ is nullptr.");
+        return DCAMERA_BAD_VALUE;
+    }
+    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
+    return DCAMERA_OK;
+}
+
+AVPixelFormat ScaleConvertProcess::GetAVPixelFormat(Videoformat colorFormat)
+{
+    AVPixelFormat format;
+    switch (colorFormat) {
+        case Videoformat::NV12:
+            format = AVPixelFormat::AV_PIX_FMT_NV12;
+            break;
+        case Videoformat::NV21:
+            format = AVPixelFormat::AV_PIX_FMT_NV21;
+            break;
+        default:
+            format = AVPixelFormat::AV_PIX_FMT_YUV420P;
+            break;
+    }
+    return format;
+}
+} // namespace DistributedHardware
+} // namespace OHOS