From f2f66ebac39efe6be8ea4ab6b21098756d558766 Mon Sep 17 00:00:00 2001 From: t00605578 Date: Thu, 5 May 2022 17:50:45 +0800 Subject: [PATCH 1/4] Adapts to the ColorConvertNode upgrade Signed-off-by: t00605578 --- services/data_process/BUILD.gn | 2 +- .../include/pipeline/abstract_data_process.h | 3 +- .../include/pipeline/dcamera_pipeline_sink.h | 11 +- .../pipeline/dcamera_pipeline_source.h | 11 +- ..._nv12_to_nv21.h => color_format_process.h} | 39 ++- .../fpscontroller/fps_controller_process.h | 15 +- .../decoder/decode_data_process.h | 29 +- .../encoder/encode_data_process.h | 30 +- .../include/utils/image_common_type.h | 35 +- .../src/pipeline/dcamera_pipeline_sink.cpp | 17 +- .../src/pipeline/dcamera_pipeline_source.cpp | 21 +- ...2_to_nv21.cpp => color_format_process.cpp} | 302 ++++++++++++------ .../fpscontroller/fps_controller_process.cpp | 20 +- .../decoder/decode_data_process.cpp | 156 +++++---- .../decoder/decode_data_process_common.cpp | 129 +++++--- .../encoder/encode_data_process.cpp | 128 +++++--- .../encoder/encode_data_process_common.cpp | 131 +++++--- .../src/utils/image_common_type.cpp | 10 +- 18 files changed, 710 insertions(+), 379 deletions(-) rename services/data_process/include/pipeline_node/colorspace_conversion/{convert_nv12_to_nv21.h => color_format_process.h} (53%) rename services/data_process/src/pipeline_node/colorspace_conversion/{convert_nv12_to_nv21.cpp => color_format_process.cpp} (59%) diff --git a/services/data_process/BUILD.gn b/services/data_process/BUILD.gn index 75a37513..fc7e8faf 100644 --- a/services/data_process/BUILD.gn +++ b/services/data_process/BUILD.gn @@ -49,7 +49,7 @@ ohos_shared_library("distributed_camera_data_process") { "src/pipeline/abstract_data_process.cpp", "src/pipeline/dcamera_pipeline_sink.cpp", "src/pipeline/dcamera_pipeline_source.cpp", - "src/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.cpp", + "src/pipeline_node/colorspace_conversion/color_format_process.cpp", "src/pipeline_node/fpscontroller/fps_controller_process.cpp", "src/pipeline_node/multimedia_codec/decoder/decode_surface_listener.cpp", "src/pipeline_node/multimedia_codec/decoder/decode_video_callback.cpp", diff --git a/services/data_process/include/pipeline/abstract_data_process.h b/services/data_process/include/pipeline/abstract_data_process.h index 7f18737c..b50c523c 100644 --- a/services/data_process/include/pipeline/abstract_data_process.h +++ b/services/data_process/include/pipeline/abstract_data_process.h @@ -31,7 +31,8 @@ public: int32_t SetNextNode(std::shared_ptr& nextDataProcess); void SetNodeRank(size_t curNodeRank); - virtual int32_t InitNode() = 0; + virtual int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) = 0; virtual int32_t ProcessData(std::vector>& inputBuffers) = 0; virtual void ReleaseProcessNode() = 0; diff --git a/services/data_process/include/pipeline/dcamera_pipeline_sink.h b/services/data_process/include/pipeline/dcamera_pipeline_sink.h index 60f6c23d..200d14ec 100644 --- a/services/data_process/include/pipeline/dcamera_pipeline_sink.h +++ b/services/data_process/include/pipeline/dcamera_pipeline_sink.h @@ -54,11 +54,12 @@ private: private: const static std::string PIPELINE_OWNER; - constexpr static uint32_t MAX_FRAME_RATE = 30; - constexpr static uint32_t MIN_VIDEO_WIDTH = 320; - constexpr static uint32_t MIN_VIDEO_HEIGHT = 240; - constexpr static uint32_t MAX_VIDEO_WIDTH = 1920; - constexpr static uint32_t MAX_VIDEO_HEIGHT = 1080; + constexpr 
static int32_t MIN_FRAME_RATE = 0; + constexpr static int32_t MAX_FRAME_RATE = 30; + constexpr static int32_t MIN_VIDEO_WIDTH = 320; + constexpr static int32_t MIN_VIDEO_HEIGHT = 240; + constexpr static int32_t MAX_VIDEO_WIDTH = 1920; + constexpr static int32_t MAX_VIDEO_HEIGHT = 1080; std::shared_ptr processListener_ = nullptr; std::shared_ptr pipelineHead_ = nullptr; diff --git a/services/data_process/include/pipeline/dcamera_pipeline_source.h b/services/data_process/include/pipeline/dcamera_pipeline_source.h index 14e0ed5b..2d7310a2 100644 --- a/services/data_process/include/pipeline/dcamera_pipeline_source.h +++ b/services/data_process/include/pipeline/dcamera_pipeline_source.h @@ -57,11 +57,12 @@ private: private: const static std::string PIPELINE_OWNER; - constexpr static uint32_t MAX_FRAME_RATE = 30; - constexpr static uint32_t MIN_VIDEO_WIDTH = 320; - constexpr static uint32_t MIN_VIDEO_HEIGHT = 240; - constexpr static uint32_t MAX_VIDEO_WIDTH = 1920; - constexpr static uint32_t MAX_VIDEO_HEIGHT = 1080; + constexpr static int32_t MIN_FRAME_RATE = 0; + constexpr static int32_t MAX_FRAME_RATE = 30; + constexpr static int32_t MIN_VIDEO_WIDTH = 320; + constexpr static int32_t MIN_VIDEO_HEIGHT = 240; + constexpr static int32_t MAX_VIDEO_WIDTH = 1920; + constexpr static int32_t MAX_VIDEO_HEIGHT = 1080; std::shared_ptr processListener_ = nullptr; std::shared_ptr pipelineHead_ = nullptr; diff --git a/services/data_process/include/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.h b/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h similarity index 53% rename from services/data_process/include/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.h rename to services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h index 5f950716..19a981fe 100644 --- a/services/data_process/include/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.h +++ b/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h @@ -13,27 +13,35 @@ * limitations under the License. 
*/ -#ifndef OHOS_CONVERT_NV12TONV21_H -#define OHOS_CONVERT_NV12TONV21_H +#ifndef OHOS_COLOR_FORMAT_PROCESS_H +#define OHOS_COLOR_FORMAT_PROCESS_H #include "securec.h" + +#include "abstract_data_process.h" #include "data_buffer.h" +#include "dcamera_pipeline_source.h" #include "image_common_type.h" namespace OHOS { namespace DistributedHardware { -class ConvertNV12ToNV21 { +class ColorFormatProcess : public AbstractDataProcess{ public: - ConvertNV12ToNV21() = default; - ~ConvertNV12ToNV21() = default; - std::shared_ptr ProcessData(const std::shared_ptr& srcBuf, - const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig); + explicit ColorFormatProcess(const std::weak_ptr& callbackPipSource) + : callbackPipelineSource_(callbackPipSource) {} + ~ColorFormatProcess(); + + int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) override; + int32_t ProcessData(std::vector>& inputBuffers) override; + void ReleaseProcessNode() override; private: bool IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig); int32_t GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf); + bool CheckColorProcessInputInfo(const ImageUnitInfo& srcImgInfo); + bool CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); bool IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo); - int32_t CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); void SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane, int32_t srcHalfWidth); int32_t SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); @@ -42,7 +50,20 @@ private: int32_t CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); int32_t CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); int32_t ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); + int32_t ColorConvertNV12ToI420(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); + int32_t ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo); + int32_t ColorFormatDone(std::vector>& outputBuffers); + +private: + constexpr static int32_t YUV_BYTES_PER_PIXEL = 3; + constexpr static int32_t Y2UV_RATIO = 2; + + std::weak_ptr callbackPipelineSource_; + VideoConfigParams sourceConfig_; + VideoConfigParams targetConfig_; + VideoConfigParams processedConfig_; + std::atomic isColorFormatProcess_ = false; }; } // namespace DistributedHardware } // namespace OHOS -#endif // OHOS_CONVERT_NV12TONV21_H +#endif // OHOS_COLOR_FORMAT_PROCESS_H diff --git a/services/data_process/include/pipeline_node/fpscontroller/fps_controller_process.h b/services/data_process/include/pipeline_node/fpscontroller/fps_controller_process.h index 29a48c8b..f56a90a2 100644 --- a/services/data_process/include/pipeline_node/fpscontroller/fps_controller_process.h +++ b/services/data_process/include/pipeline_node/fpscontroller/fps_controller_process.h @@ -27,12 +27,12 @@ class DCameraPipelineSource; class FpsControllerProcess : public AbstractDataProcess { public: - FpsControllerProcess(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, - const std::weak_ptr& callbackPipSource) - : sourceConfig_(sourceConfig), targetConfig_(targetConfig), callbackPipelineSource_(callbackPipSource) {} + explicit 
FpsControllerProcess(const std::weak_ptr& callbackPipSource) + : callbackPipelineSource_(callbackPipSource) {} ~FpsControllerProcess(); - int32_t InitNode() override; + int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) override; int32_t ProcessData(std::vector>& inputBuffers) override; void ReleaseProcessNode() override; @@ -46,7 +46,7 @@ private: int32_t FpsControllerDone(std::vector>& outputBuffers); private: - constexpr static uint32_t MAX_TARGET_FRAME_RATE = 30; + constexpr static int32_t MAX_TARGET_FRAME_RATE = 30; constexpr static int32_t VIDEO_FRAME_DROP_INTERVAL = 4; constexpr static int32_t MIN_INCOME_FRAME_NUM_COEFFICIENT = 3; constexpr static int32_t INCOME_FRAME_TIME_HISTORY_WINDOWS_SIZE = 60; @@ -56,13 +56,14 @@ private: constexpr static int32_t OVERSHOOT_MODIFY_COEFFICIENT = 3; constexpr static int32_t DOUBLE_MULTIPLE = 2; + std::weak_ptr callbackPipelineSource_; std::mutex mtx; VideoConfigParams sourceConfig_; VideoConfigParams targetConfig_; - std::weak_ptr callbackPipelineSource_; + VideoConfigParams processedConfig_; bool isFpsControllerProcess_ = false; bool isFirstFrame_ = false; - uint32_t targetFrameRate_ = 0; + int32_t targetFrameRate_ = 0; int64_t lastFrameIncomeTimeMs_ = 0; /* the time span between current and last frame */ int64_t recentFrameTimeSpanMs_ = -1; diff --git a/services/data_process/include/pipeline_node/multimedia_codec/decoder/decode_data_process.h b/services/data_process/include/pipeline_node/multimedia_codec/decoder/decode_data_process.h index ac6d755c..dd0937a2 100644 --- a/services/data_process/include/pipeline_node/multimedia_codec/decoder/decode_data_process.h +++ b/services/data_process/include/pipeline_node/multimedia_codec/decoder/decode_data_process.h @@ -51,14 +51,13 @@ class DecodeVideoCallback; class DecodeDataProcess : public EventSender, public EventBusHandler, public AbstractDataProcess, public std::enable_shared_from_this { public: - DecodeDataProcess(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, - const std::shared_ptr& eventBusPipeline, + DecodeDataProcess(const std::shared_ptr& eventBusPipeline, const std::weak_ptr& callbackPipSource) - : sourceConfig_(sourceConfig), targetConfig_(targetConfig), eventBusPipeline_(eventBusPipeline), - callbackPipelineSource_(callbackPipSource) {} + : eventBusPipeline_(eventBusPipeline), callbackPipelineSource_(callbackPipSource) {} ~DecodeDataProcess(); - int32_t InitNode() override; + int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) override; int32_t ProcessData(std::vector>& inputBuffers) override; void ReleaseProcessNode() override; void OnEvent(DCameraCodecEvent& ev) override; @@ -77,8 +76,10 @@ private: bool IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig); void InitCodecEvent(); int32_t InitDecoder(); + int32_t ConfigureVideoDecoder(); int32_t InitDecoderMetadataFormat(); int32_t SetDecoderOutputSurface(); + int32_t StartVideoDecoder(); int32_t StopVideoDecoder(); void ReleaseVideoDecoder(); void ReleaseDecoderSurface(); @@ -100,22 +101,24 @@ private: constexpr static int32_t VIDEO_DECODER_QUEUE_MAX = 1000; constexpr static int32_t MAX_YUV420_BUFFER_SIZE = 1920 * 1080 * 3 / 2 * 2; constexpr static int32_t MAX_RGB32_BUFFER_SIZE = 1920 * 1080 * 4 * 2; - constexpr static uint32_t MAX_FRAME_RATE = 30; - constexpr static uint32_t MIN_VIDEO_WIDTH = 320; - 
constexpr static uint32_t MIN_VIDEO_HEIGHT = 240; - constexpr static uint32_t MAX_VIDEO_WIDTH = 1920; - constexpr static uint32_t MAX_VIDEO_HEIGHT = 1080; + constexpr static int32_t MIN_FRAME_RATE = 0; + constexpr static int32_t MAX_FRAME_RATE = 30; + constexpr static int32_t MIN_VIDEO_WIDTH = 320; + constexpr static int32_t MIN_VIDEO_HEIGHT = 240; + constexpr static int32_t MAX_VIDEO_WIDTH = 1920; + constexpr static int32_t MAX_VIDEO_HEIGHT = 1080; constexpr static int32_t FIRST_FRAME_INPUT_NUM = 2; constexpr static int32_t RGB32_MEMORY_COEFFICIENT = 4; constexpr static int32_t YUV_BYTES_PER_PIXEL = 3; constexpr static int32_t Y2UV_RATIO = 2; + std::shared_ptr eventBusPipeline_; + std::weak_ptr callbackPipelineSource_; std::mutex mtxDecoderState_; std::mutex mtxHoldCount_; VideoConfigParams sourceConfig_; VideoConfigParams targetConfig_; - std::shared_ptr eventBusPipeline_; - std::weak_ptr callbackPipelineSource_; + VideoConfigParams processedConfig_; std::shared_ptr eventBusDecode_ = nullptr; std::shared_ptr eventBusRegHandleDecode_ = nullptr; std::shared_ptr eventBusRegHandlePipeline2Decode_ = nullptr; @@ -125,7 +128,7 @@ private: sptr decodeProducerSurface_ = nullptr; sptr decodeSurfaceListener_ = nullptr; - bool isDecoderProcess_ = false; + std::atomic isDecoderProcess_ = false; int32_t waitDecoderOutputCount_ = 0; int32_t alignedHeight_ = 0; int64_t lastFeedDecoderInputBufferTimeUs_ = 0; diff --git a/services/data_process/include/pipeline_node/multimedia_codec/encoder/encode_data_process.h b/services/data_process/include/pipeline_node/multimedia_codec/encoder/encode_data_process.h index 0ee7f96f..a7e9275d 100644 --- a/services/data_process/include/pipeline_node/multimedia_codec/encoder/encode_data_process.h +++ b/services/data_process/include/pipeline_node/multimedia_codec/encoder/encode_data_process.h @@ -41,12 +41,12 @@ class EncodeVideoCallback; class EncodeDataProcess : public AbstractDataProcess, public std::enable_shared_from_this { public: - EncodeDataProcess(const VideoConfigParams &sourceConfig, const VideoConfigParams &targetConfig, - const std::weak_ptr& callbackPipSink) - : sourceConfig_(sourceConfig), targetConfig_(targetConfig), callbackPipelineSink_(callbackPipSink) {} + explicit EncodeDataProcess(const std::weak_ptr& callbackPipSink) + : callbackPipelineSink_(callbackPipSink) {} ~EncodeDataProcess(); - int32_t InitNode() override; + int32_t InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) override; int32_t ProcessData(std::vector>& inputBuffers) override; void ReleaseProcessNode() override; @@ -61,15 +61,17 @@ private: bool IsInEncoderRange(const VideoConfigParams& curConfig); bool IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig); int32_t InitEncoder(); + int32_t ConfigureVideoEncoder(); int32_t InitEncoderMetadataFormat(); int32_t InitEncoderBitrateFormat(); + int32_t StartVideoEncoder(); int32_t StopVideoEncoder(); void ReleaseVideoEncoder(); int32_t FeedEncoderInputBuffer(std::shared_ptr& inputBuffer); sptr GetEncoderInputSurfaceBuffer(); int64_t GetEncoderTimeStamp(); - void IncreaseWaitDecodeCnt(); - void ReduceWaitDecodeCnt(); + void IncreaseWaitEncodeCnt(); + void ReduceWaitEncodeCnt(); int32_t GetEncoderOutputBuffer(uint32_t index, Media::AVCodecBufferInfo info); int32_t EncodeDone(std::vector>& outputBuffers); @@ -77,11 +79,12 @@ private: constexpr static int32_t ENCODER_STRIDE_ALIGNMENT = 8; constexpr static int64_t NORM_YUV420_BUFFER_SIZE = 
1920 * 1080 * 3 / 2; constexpr static int32_t NORM_RGB32_BUFFER_SIZE = 1920 * 1080 * 4; - constexpr static uint32_t MAX_FRAME_RATE = 30; - constexpr static uint32_t MIN_VIDEO_WIDTH = 320; - constexpr static uint32_t MIN_VIDEO_HEIGHT = 240; - constexpr static uint32_t MAX_VIDEO_WIDTH = 1920; - constexpr static uint32_t MAX_VIDEO_HEIGHT = 1080; + constexpr static int32_t MIN_FRAME_RATE = 0; + constexpr static int32_t MAX_FRAME_RATE = 30; + constexpr static int32_t MIN_VIDEO_WIDTH = 320; + constexpr static int32_t MIN_VIDEO_HEIGHT = 240; + constexpr static int32_t MAX_VIDEO_WIDTH = 1920; + constexpr static int32_t MAX_VIDEO_HEIGHT = 1080; constexpr static int32_t IDR_FRAME_INTERVAL_MS = 300; constexpr static int32_t FIRST_FRAME_OUTPUT_NUM = 2; @@ -107,16 +110,17 @@ private: constexpr static int32_t BITRATE_6000000 = 6000000; const static std::map ENCODER_BITRATE_TABLE; + std::weak_ptr callbackPipelineSink_; std::mutex mtxEncoderState_; std::mutex mtxHoldCount_; VideoConfigParams sourceConfig_; VideoConfigParams targetConfig_; - std::weak_ptr callbackPipelineSink_; + VideoConfigParams processedConfig_; std::shared_ptr videoEncoder_ = nullptr; std::shared_ptr encodeVideoCallback_ = nullptr; sptr encodeProducerSurface_ = nullptr; - bool isEncoderProcess_ = false; + std::atomic isEncoderProcess_ = false; int32_t waitEncoderOutputCount_ = 0; int64_t lastFeedEncoderInputBufferTimeUs_ = 0; int64_t inputTimeStampUs_ = 0; diff --git a/services/data_process/include/utils/image_common_type.h b/services/data_process/include/utils/image_common_type.h index a176c85d..235f8607 100644 --- a/services/data_process/include/utils/image_common_type.h +++ b/services/data_process/include/utils/image_common_type.h @@ -23,45 +23,50 @@ namespace OHOS { namespace DistributedHardware { enum class PipelineType : int32_t { VIDEO = 0, - PHOTO_JPEG, + PHOTO_JPEG = 1, }; enum class VideoCodecType : int32_t { NO_CODEC = 0, - CODEC_H264, - CODEC_H265, + CODEC_H264 = 1, + CODEC_H265 = 2, + CODEC_MPEG4 = 3, }; enum class Videoformat : int32_t { YUVI420 = 0, - NV12, - NV21, + NV12 = 1, + NV21 = 2, + RGBA_8888 = 3, }; class VideoConfigParams { public: - VideoConfigParams(VideoCodecType videoCodec, Videoformat pixelFormat, uint32_t frameRate, uint32_t width, - uint32_t height) + VideoConfigParams() : videoCodec_(VideoCodecType::NO_CODEC), pixelFormat_(Videoformat::YUVI420), + frameRate_(0), width_ (0), height_(0) + {} + VideoConfigParams(VideoCodecType videoCodec, Videoformat pixelFormat, int32_t frameRate, int32_t width, + int32_t height) : videoCodec_(videoCodec), pixelFormat_(pixelFormat), frameRate_(frameRate), width_ (width), height_(height) {} ~VideoConfigParams() = default; void SetVideoCodecType(VideoCodecType videoCodec); void SetVideoformat(Videoformat pixelFormat); - void SetFrameRate(uint32_t frameRate); - void SetWidthAndHeight(uint32_t width, uint32_t height); + void SetFrameRate(int32_t frameRate); + void SetWidthAndHeight(int32_t width, int32_t height); VideoCodecType GetVideoCodecType() const; Videoformat GetVideoformat() const; - uint32_t GetFrameRate() const; - uint32_t GetWidth() const; - uint32_t GetHeight() const; + int32_t GetFrameRate() const; + int32_t GetWidth() const; + int32_t GetHeight() const; private: VideoCodecType videoCodec_; Videoformat pixelFormat_; - uint32_t frameRate_; - uint32_t width_; - uint32_t height_; + int32_t frameRate_; + int32_t width_; + int32_t height_; }; struct ImageUnitInfo { diff --git a/services/data_process/src/pipeline/dcamera_pipeline_sink.cpp 
b/services/data_process/src/pipeline/dcamera_pipeline_sink.cpp index afe508fa..a6119a00 100644 --- a/services/data_process/src/pipeline/dcamera_pipeline_sink.cpp +++ b/services/data_process/src/pipeline/dcamera_pipeline_sink.cpp @@ -69,9 +69,9 @@ int32_t DCameraPipelineSink::CreateDataProcessPipeline(PipelineType piplineType, bool DCameraPipelineSink::IsInRange(const VideoConfigParams& curConfig) { - return (curConfig.GetFrameRate() <= MAX_FRAME_RATE || curConfig.GetWidth() >= MIN_VIDEO_WIDTH || - curConfig.GetWidth() <= MAX_VIDEO_WIDTH || curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || - curConfig.GetHeight() <= MAX_VIDEO_HEIGHT); + return (curConfig.GetFrameRate() >= MIN_FRAME_RATE || curConfig.GetFrameRate() <= MAX_FRAME_RATE || + curConfig.GetWidth() >= MIN_VIDEO_WIDTH || curConfig.GetWidth() <= MAX_VIDEO_WIDTH || + curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || curConfig.GetHeight() <= MAX_VIDEO_HEIGHT); } int32_t DCameraPipelineSink::InitDCameraPipNodes(const VideoConfigParams& sourceConfig, @@ -83,22 +83,29 @@ int32_t DCameraPipelineSink::InitDCameraPipNodes(const VideoConfigParams& source return DCAMERA_NOT_FOUND; } - pipNodeRanks_.push_back(std::make_shared(sourceConfig, targetConfig, shared_from_this())); + pipNodeRanks_.push_back(std::make_shared(shared_from_this())); if (pipNodeRanks_.size() == 0) { DHLOGD("Creating an empty sink pipeline."); pipelineHead_ = nullptr; return DCAMERA_BAD_VALUE; } + + VideoConfigParams curNodeSourceCfg = sourceConfig; for (size_t i = 0; i < pipNodeRanks_.size(); i++) { pipNodeRanks_[i]->SetNodeRank(i); - int32_t err = pipNodeRanks_[i]->InitNode(); + + VideoConfigParams curNodeProcessedCfg; + int32_t err = pipNodeRanks_[i]->InitNode(curNodeSourceCfg, targetConfig, curNodeProcessedCfg); if (err != DCAMERA_OK) { DHLOGE("Init sink DCamera pipeline Node [%d] failed.", i); return DCAMERA_INIT_ERR; } + curNodeSourceCfg = curNodeProcessedCfg; + if (i == 0) { continue; } + err = pipNodeRanks_[i - 1]->SetNextNode(pipNodeRanks_[i]); if (err != DCAMERA_OK) { DHLOGE("Set the next node of Node [%d] failed in sink pipeline.", i - 1); diff --git a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp index ddb80e5b..2bdd2255 100644 --- a/services/data_process/src/pipeline/dcamera_pipeline_source.cpp +++ b/services/data_process/src/pipeline/dcamera_pipeline_source.cpp @@ -17,6 +17,7 @@ #include "distributed_hardware_log.h" +#include "color_format_process.h" #include "decode_data_process.h" #include "fps_controller_process.h" @@ -72,9 +73,9 @@ int32_t DCameraPipelineSource::CreateDataProcessPipeline(PipelineType piplineTyp bool DCameraPipelineSource::IsInRange(const VideoConfigParams& curConfig) { - return (curConfig.GetFrameRate() <= MAX_FRAME_RATE || curConfig.GetWidth() >= MIN_VIDEO_WIDTH || - curConfig.GetWidth() <= MAX_VIDEO_WIDTH || curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || - curConfig.GetHeight() <= MAX_VIDEO_HEIGHT); + return (curConfig.GetFrameRate() >= MIN_FRAME_RATE || curConfig.GetFrameRate() <= MAX_FRAME_RATE || + curConfig.GetWidth() >= MIN_VIDEO_WIDTH || curConfig.GetWidth() <= MAX_VIDEO_WIDTH || + curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || curConfig.GetHeight() <= MAX_VIDEO_HEIGHT); } void DCameraPipelineSource::InitDCameraPipEvent() @@ -97,23 +98,31 @@ int32_t DCameraPipelineSource::InitDCameraPipNodes(const VideoConfigParams& sour DHLOGE("eventBusSource is nullptr."); return DCAMERA_BAD_VALUE; } - pipNodeRanks_.push_back(std::make_shared(sourceConfig, targetConfig, - 
eventBusSource_, shared_from_this())); + + pipNodeRanks_.push_back(std::make_shared(eventBusSource_, shared_from_this())); + pipNodeRanks_.push_back(std::make_shared(shared_from_this())); if (pipNodeRanks_.size() == 0) { DHLOGD("Creating an empty source pipeline."); pipelineHead_ = nullptr; return DCAMERA_BAD_VALUE; } + + VideoConfigParams curNodeSourceCfg = sourceConfig; for (size_t i = 0; i < pipNodeRanks_.size(); i++) { pipNodeRanks_[i]->SetNodeRank(i); - int32_t err = pipNodeRanks_[i]->InitNode(); + + VideoConfigParams curNodeProcessedCfg; + int32_t err = pipNodeRanks_[i]->InitNode(curNodeSourceCfg, targetConfig, curNodeProcessedCfg); if (err != DCAMERA_OK) { DHLOGE("Init source DCamera pipeline Node [%d] failed.", i); return DCAMERA_INIT_ERR; } + curNodeSourceCfg = curNodeProcessedCfg; + if (i == 0) { continue; } + err = pipNodeRanks_[i - 1]->SetNextNode(pipNodeRanks_[i]); if (err != DCAMERA_OK) { DHLOGE("Set the next node of Node [%d] failed in source pipeline.", i - 1); diff --git a/services/data_process/src/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.cpp b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp similarity index 59% rename from services/data_process/src/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.cpp rename to services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp index 798f39d6..d35718d3 100644 --- a/services/data_process/src/pipeline_node/colorspace_conversion/convert_nv12_to_nv21.cpp +++ b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp @@ -13,7 +13,7 @@ * limitations under the License. */ -#include "convert_nv12_to_nv21.h" +#include "color_format_process.h" #include "distributed_hardware_log.h" @@ -21,13 +21,104 @@ namespace OHOS { namespace DistributedHardware { -bool ConvertNV12ToNV21::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) +ColorFormatProcess::~ColorFormatProcess() +{ + if (isColorFormatProcess_.load()) { + DHLOGD("~ColorFormatProcess : ReleaseProcessNode."); + ReleaseProcessNode(); + } +} + +int32_t ColorFormatProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) +{ + DHLOGD("ColorFormatProcess : InitNode."); + if (!IsConvertible(sourceConfig, targetConfig)) { + DHLOGE("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.", + sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(), + targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight()); + return DCAMERA_BAD_TYPE; + } + + sourceConfig_ = sourceConfig; + targetConfig_ = targetConfig; + processedConfig_ = sourceConfig; + + if (sourceConfig_.GetVideoformat() != targetConfig_.GetVideoformat()) { + processedConfig_.SetVideoformat(targetConfig_.GetVideoformat()); + } + + processedConfig = processedConfig_; + isColorFormatProcess_.store(true); + return DCAMERA_OK; +} + +bool ColorFormatProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) { return (sourceConfig.GetVideoformat() == Videoformat::NV12 && targetConfig.GetVideoformat() == Videoformat::NV21 && sourceConfig.GetWidth() == targetConfig.GetWidth() && sourceConfig.GetHeight() == targetConfig.GetHeight()); } -int32_t ConvertNV12ToNV21::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf) +void ColorFormatProcess::ReleaseProcessNode() +{ + 
DHLOGD("Start release [%d] node : ColorFormatNode.", nodeRank_); + isColorFormatProcess_.store(false); + + if (nextDataProcess_ != nullptr) { + nextDataProcess_->ReleaseProcessNode(); + } + nextDataProcess_ = nullptr; +} + +int32_t ColorFormatProcess::ProcessData(std::vector>& inputBuffers) +{ + DHLOGD("Process data in ColorFormatProcess."); + if (inputBuffers.empty() || inputBuffers[0] == nullptr) { + DHLOGE("The input data buffers is empty."); + return DCAMERA_BAD_VALUE; + } + + if (sourceConfig_.GetVideoformat() == targetConfig_.GetVideoformat()) { + DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.", + sourceConfig_.GetVideoformat(), targetConfig_.GetVideoformat()); + return ColorFormatDone(inputBuffers); + } + + int64_t timeStamp = 0; + if (!(inputBuffers[0]->FindInt64("timeUs", timeStamp))) { + DHLOGE("ColorConvertProcess : Find inputBuffer timeStamp failed."); + return DCAMERA_BAD_VALUE; + } + + ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr}; + if (GetImageUnitInfo(srcImgInfo, inputBuffers[0]) != DCAMERA_OK || !CheckColorProcessInputInfo(srcImgInfo)) { + DHLOGE("ColorConvertProcess : srcImgInfo error."); + return DCAMERA_BAD_VALUE; + } + + size_t dstBufsize = sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO; + std::shared_ptr dstBuf = std::make_shared(dstBufsize); + ImageUnitInfo dstImgInfo = { targetConfig_.GetVideoformat(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), + sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetWidth() * sourceConfig_.GetHeight(), + dstBuf->Size(), dstBuf->Data() }; + if (ColorConvertByColorFormat(srcImgInfo, dstImgInfo) != DCAMERA_OK) { + DHLOGE("ColorConvertProcess : ColorConvertByColorFormat failed."); + return DCAMERA_BAD_OPERATE; + } + + dstBuf->SetInt64("timeUs", timeStamp); + dstBuf->SetInt32("Videoformat", static_cast(targetConfig_.GetVideoformat())); + dstBuf->SetInt32("alignedWidth", sourceConfig_.GetWidth()); + dstBuf->SetInt32("alignedHeight", sourceConfig_.GetHeight()); + dstBuf->SetInt32("width", sourceConfig_.GetWidth()); + dstBuf->SetInt32("height", sourceConfig_.GetHeight()); + + std::vector> outputBuffers; + outputBuffers.push_back(dstBuf); + return ColorFormatDone(outputBuffers); +} + +int32_t ColorFormatProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr& imgBuf) { if (imgBuf == nullptr) { DHLOGE("GetImageUnitInfo failed, imgBuf is nullptr."); @@ -71,48 +162,54 @@ int32_t ConvertNV12ToNV21::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::s return DCAMERA_OK; } -bool ConvertNV12ToNV21::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo) +bool ColorFormatProcess::CheckColorProcessInputInfo(const ImageUnitInfo& srcImgInfo) { - int32_t y2UvRatio = 2; - int32_t bytesPerPixel = 3; - size_t expectedImgSize = static_cast(imgInfo.alignedWidth * imgInfo.alignedHeight * - bytesPerPixel / y2UvRatio); - size_t expectedChromaOffset = static_cast(imgInfo.alignedWidth * imgInfo.alignedHeight); - return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight && - imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset); + return srcImgInfo.colorFormat == sourceConfig_.GetVideoformat() && + srcImgInfo.alignedWidth == sourceConfig_.GetWidth() && + srcImgInfo.alignedHeight == sourceConfig_.GetHeight() && + IsCorrectImageUnitInfo(srcImgInfo); } -int32_t ConvertNV12ToNV21::CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& 
dstImgInfo)
+bool ColorFormatProcess::CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
 {
     if (srcImgInfo.imgData == nullptr || dstImgInfo.imgData == nullptr) {
         DHLOGE("The imgData of srcImgInfo or the imgData of dstImgInfo are null!");
-        return DCAMERA_BAD_VALUE;
+        return false;
     }
     if (srcImgInfo.colorFormat != Videoformat::NV12 && dstImgInfo.colorFormat != Videoformat::NV21) {
         DHLOGE("CopyInfo error : srcImgInfo colorFormat %d, dstImgInfo colorFormat %d.",
             srcImgInfo.colorFormat, dstImgInfo.colorFormat);
-        return DCAMERA_BAD_VALUE;
+        return false;
     }
     if (!IsCorrectImageUnitInfo(srcImgInfo)) {
         DHLOGE("srcImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
             "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight,
             srcImgInfo.chromaOffset, srcImgInfo.imgSize);
-        return DCAMERA_BAD_VALUE;
+        return false;
     }
     if (!IsCorrectImageUnitInfo(dstImgInfo)) {
         DHLOGE("dstImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
             "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, dstImgInfo.alignedHeight,
             dstImgInfo.chromaOffset, dstImgInfo.imgSize);
-        return DCAMERA_BAD_VALUE;
+        return false;
     }
     if (dstImgInfo.width > srcImgInfo.alignedWidth || dstImgInfo.height > srcImgInfo.alignedHeight) {
         DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.",
             dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight);
-        return DCAMERA_BAD_VALUE;
+        return false;
     }
-    return DCAMERA_OK;
+    return true;
+}
+
+bool ColorFormatProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo)
+{
+    size_t expectedImgSize = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight *
+        YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
+    size_t expectedChromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
+    return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight &&
+        imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset);
 }
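
The helpers that follow de-interleave the NV12 chroma plane row by row (SeparateUVPlaneByRow) and re-interleave it for the reverse direction (CombineUVPlaneByRow). Stripped of the stride and negative-height handling, the core de-interleave loop amounts to this sketch (illustrative only, not part of the patch):

#include <cstdint>

// De-interleave one row of an NV12 UV plane (U0 V0 U1 V1 ...) into a
// U row and a V row, i.e. one row of the NV12 -> YUVI420 chroma conversion.
void DeinterleaveUvRow(const uint8_t *srcUV, uint8_t *dstU, uint8_t *dstV, int32_t halfWidth)
{
    for (int32_t i = 0; i < halfWidth; i++) {
        dstU[i] = srcUV[2 * i];      // even bytes carry U samples
        dstV[i] = srcUV[2 * i + 1];  // odd bytes carry V samples
    }
}

 /**
 * converts the UVPlane memory arrangement of NV12 to the UV memory arrangement of YUVI420. Note that the
 * stride and width of the dstImage must be the same.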
*/ -void ConvertNV12ToNV21::SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane, +void ColorFormatProcess::SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane, int32_t srcHalfWidth) { int32_t memoryOffset0 = 0; @@ -141,23 +238,21 @@ void ConvertNV12ToNV21::SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t } } -int32_t ConvertNV12ToNV21::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ColorFormatProcess::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { - int32_t ret = CheckColorConvertInfo(srcImgInfo, dstImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ColorConvert : CheckColorConvertInfo failed."); - return ret; + if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + return DCAMERA_BAD_VALUE; } - int32_t y2UvRatio = 2; uint8_t *srcUVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset; int32_t srcUVStride = srcImgInfo.alignedWidth; uint8_t *dstUPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset; - int32_t dstUStride = dstImgInfo.alignedWidth / y2UvRatio; - uint8_t *dstVPlane = dstUPlane + (dstImgInfo.chromaOffset / y2UvRatio) / y2UvRatio; - int32_t dstVStride = dstImgInfo.alignedWidth / y2UvRatio; - int32_t width = srcImgInfo.width / y2UvRatio; - int32_t height = srcImgInfo.height / y2UvRatio; + int32_t dstUStride = dstImgInfo.alignedWidth / Y2UV_RATIO; + uint8_t *dstVPlane = dstUPlane + (dstImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO; + int32_t dstVStride = dstImgInfo.alignedWidth / Y2UV_RATIO; + int32_t width = srcImgInfo.width / Y2UV_RATIO; + int32_t height = srcImgInfo.height / Y2UV_RATIO; DHLOGD("srcUVStride %d, dstUStride %d, dstVStride %d, src half width %d, src half height %d.", srcUVStride, dstUStride, dstVStride, width, height); @@ -170,7 +265,7 @@ int32_t ConvertNV12ToNV21::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, dstVStride = -dstVStride; } /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */ - if (srcUVStride == width * y2UvRatio && dstUStride == width && dstVStride == width) { + if (srcUVStride == width * Y2UV_RATIO && dstUStride == width && dstVStride == width) { SeparateUVPlaneByRow(srcUVPlane, dstUPlane, dstVPlane, width * height); return DCAMERA_OK; } @@ -189,7 +284,7 @@ int32_t ConvertNV12ToNV21::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, * converts the UVPlane memory arrangement of YUVI420 to the UV memory arrangement of NV12. Note that the * stride and width of the srcImage must be the same. 
*/ -void ConvertNV12ToNV21::CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint8_t *srcVPlane, uint8_t *dstUVPlane, +void ColorFormatProcess::CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint8_t *srcVPlane, uint8_t *dstUVPlane, int32_t dstHalfWidth) { int32_t memoryOffset0 = 0; @@ -210,23 +305,21 @@ void ConvertNV12ToNV21::CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint } } -int32_t ConvertNV12ToNV21::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ColorFormatProcess::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { - int32_t ret = CheckColorConvertInfo(srcImgInfo, dstImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ColorConvert : CheckColorConvertInfo failed."); - return ret; + if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + return DCAMERA_BAD_VALUE; } - int32_t y2UvRatio = 2; uint8_t *srcVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset; - int32_t srcVStride = srcImgInfo.alignedWidth / y2UvRatio; - uint8_t *srcUPlane = srcVPlane + (srcImgInfo.chromaOffset / y2UvRatio) / y2UvRatio; - int32_t srcUStride = srcImgInfo.alignedWidth / y2UvRatio; + int32_t srcVStride = srcImgInfo.alignedWidth / Y2UV_RATIO; + uint8_t *srcUPlane = srcVPlane + (srcImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO; + int32_t srcUStride = srcImgInfo.alignedWidth / Y2UV_RATIO; uint8_t *dstUVPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset; int32_t dstUVStride = dstImgInfo.alignedWidth; - int32_t width = dstImgInfo.width / y2UvRatio; - int32_t height = dstImgInfo.height / y2UvRatio; + int32_t width = dstImgInfo.width / Y2UV_RATIO; + int32_t height = dstImgInfo.height / Y2UV_RATIO; DHLOGD("srcUStride %d, srcVStride %d, dstUVStride %d, dst half width %d, dst half height %d.", srcUStride, srcVStride, dstUVStride, width, height); @@ -237,7 +330,7 @@ int32_t ConvertNV12ToNV21::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, c dstUVStride = -dstUVStride; } /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. 
*/ - if (srcUStride == width && srcVStride == width && dstUVStride == width * y2UvRatio) { + if (srcUStride == width && srcVStride == width && dstUVStride == width * Y2UV_RATIO) { CombineUVPlaneByRow(srcUPlane, srcVPlane, dstUVPlane, width * height); return DCAMERA_OK; } @@ -251,12 +344,11 @@ int32_t ConvertNV12ToNV21::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, c return DCAMERA_OK; } -int32_t ConvertNV12ToNV21::CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ColorFormatProcess::CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { - int32_t ret = CheckColorConvertInfo(srcImgInfo, dstImgInfo); - if (ret != DCAMERA_OK) { - DHLOGE("ColorConvert : CheckColorConvertInfo failed."); - return ret; + if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + return DCAMERA_BAD_VALUE; } errno_t err = EOK; @@ -290,14 +382,14 @@ int32_t ConvertNV12ToNV21::CopyYPlane(const ImageUnitInfo& srcImgInfo, const Ima return DCAMERA_OK; } -int32_t ConvertNV12ToNV21::ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +int32_t ColorFormatProcess::ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { - int32_t err = CheckColorConvertInfo(srcImgInfo, dstImgInfo); - if (err != DCAMERA_OK) { - DHLOGE("ColorConvertNV12ToNV21 : CheckColorConvertInfo failed."); - return err; + if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + return DCAMERA_BAD_VALUE; } - err = CopyYPlane(srcImgInfo, dstImgInfo); + + int32_t err = CopyYPlane(srcImgInfo, dstImgInfo); if (err != DCAMERA_OK) { DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed."); return err; @@ -311,53 +403,73 @@ int32_t ConvertNV12ToNV21::ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInf return DCAMERA_OK; } -std::shared_ptr ConvertNV12ToNV21::ProcessData(const std::shared_ptr& srcBuf, - const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) +int32_t ColorFormatProcess::ColorConvertNV12ToI420(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) { - if (srcBuf == nullptr) { - DHLOGE("ColorConvertProcessData : srcBuf is null."); - return nullptr; + if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) { + DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed."); + return DCAMERA_BAD_VALUE; } - if (!IsConvertible(sourceConfig, targetConfig)) { - DHLOGE("ColorConvertProcessData : Only supported convert videoformat NV12 to NV21."); - DHLOGE("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.", - sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(), - targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight()); - return nullptr; + + int32_t err = CopyYPlane(srcImgInfo, dstImgInfo); + if (err != DCAMERA_OK) { + DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed."); + return err; } - int64_t timeStamp = 0; - if (!(srcBuf->FindInt64("timeUs", timeStamp))) { - DHLOGE("ColorConvertProcessData : Find srcBuf timeStamp failed."); - return nullptr; + + SeparateNV12UVPlane(srcImgInfo, dstImgInfo); + return DCAMERA_OK; +} + +int32_t ColorFormatProcess::ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo) +{ + int32_t ret; + switch (srcImgInfo.colorFormat) { + case Videoformat::NV12: + switch(dstImgInfo.colorFormat) { + case 
Videoformat::NV21:
+                    ret = ColorConvertNV12ToNV21(srcImgInfo, dstImgInfo);
+                    break;
+                case Videoformat::YUVI420:
+                    ret = ColorConvertNV12ToI420(srcImgInfo, dstImgInfo);
+                    break;
+                default:
+                    DHLOGE("Unsupported ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat);
+                    return DCAMERA_BAD_OPERATE;
+            }
+            break;
+        case Videoformat::NV21:
+        case Videoformat::YUVI420:
+        case Videoformat::RGBA_8888:
+            DHLOGE("Unsupported ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat);
+            return DCAMERA_BAD_OPERATE;
+    }
+    return ret;
+}
 
-    ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr};
-    if (GetImageUnitInfo(srcImgInfo, srcBuf) != DCAMERA_OK) {
-        DHLOGE("ColorConvertProcessData : Get srcImgInfo failed.");
-        return nullptr;
+int32_t ColorFormatProcess::ColorFormatDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
+{
+    DHLOGD("ColorFormat Done.");
+    if (outputBuffers.empty()) {
+        DHLOGE("The received data buffers are empty.");
+        return DCAMERA_BAD_VALUE;
     }
-    int32_t y2UvRatio = 2;
-    int32_t bytesPerPixel = 3;
-    size_t dstBufsize = sourceConfig.GetWidth() * sourceConfig.GetHeight() * bytesPerPixel / y2UvRatio;
-    std::shared_ptr<DataBuffer> dstBuf = std::make_shared<DataBuffer>(dstBufsize);
-    ImageUnitInfo dstImgInfo = { targetConfig.GetVideoformat(), static_cast<int32_t>(sourceConfig.GetWidth()),
-        static_cast<int32_t>(sourceConfig.GetHeight()), static_cast<int32_t>(sourceConfig.GetWidth()),
-        static_cast<int32_t>(sourceConfig.GetHeight()), sourceConfig.GetWidth() * sourceConfig.GetHeight(),
-        dstBuf->Size(), dstBuf->Data() };
-    int32_t err = ColorConvertNV12ToNV21(srcImgInfo, dstImgInfo);
-    if (err != DCAMERA_OK) {
-        return nullptr;
+
+    if (nextDataProcess_ != nullptr) {
+        DHLOGD("Send to the next node of the color format process for processing.");
+        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
+        if (err != DCAMERA_OK) {
+            DHLOGE("Some node after the color format process failed.");
+        }
+        return err;
     }
-    dstBuf->SetInt64("timeUs", timeStamp);
-    dstBuf->SetInt32("Videoformat", static_cast<int32_t>(targetConfig.GetVideoformat()));
-    dstBuf->SetInt32("alignedWidth", static_cast<int32_t>(sourceConfig.GetWidth()));
-    dstBuf->SetInt32("alignedHeight", static_cast<int32_t>(sourceConfig.GetHeight()));
-    dstBuf->SetInt32("width", static_cast<int32_t>(sourceConfig.GetWidth()));
-    dstBuf->SetInt32("height", static_cast<int32_t>(sourceConfig.GetHeight()));
-    DHLOGD("ColorConvert end, dstBuf Videoformat %d, width %d, height %d, alignedWidth %d, alignedHeight %d, "
-        "ImgSize%d, timeUs %lld.", targetConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(),
-        sourceConfig.GetWidth(), sourceConfig.GetHeight(), dstBuf->Size(), timeStamp);
-    return dstBuf;
+    DHLOGD("The current node is the last node; output the processed video buffer.");
+    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
+    if (targetPipelineSource == nullptr) {
+        DHLOGE("callbackPipelineSource_ is nullptr.");
+        return DCAMERA_BAD_VALUE;
+    }
+    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
+    return DCAMERA_OK;
 }
 } // namespace DistributedHardware
 } // namespace OHOS
diff --git a/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp b/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
index 4dc226b5..e660d7b0 100644
--- a/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
+++ b/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
@@ -29,14 +29,20 @@ FpsControllerProcess::~FpsControllerProcess()
 }
 
-int32_t FpsControllerProcess::InitNode()
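
With ColorFormatDone() in place, every node follows the same pattern: do its work, then either forward the buffers to the next node or hand them back to the owning pipeline. The widened InitNode() contract the pipelines rely on (see InitDCameraPipNodes() in the pipeline sources above) reduces to this sketch, reusing the patch's own types (simplified, illustrative only):

// Each node consumes the config its predecessor produced and publishes the
// config it will emit via the processedConfig out-parameter, so format
// negotiation happens once, at pipeline construction time.
int32_t InitNodes(std::vector<std::shared_ptr<AbstractDataProcess>>& nodes,
    const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    VideoConfigParams curSourceCfg = sourceConfig;
    for (size_t i = 0; i < nodes.size(); i++) {
        nodes[i]->SetNodeRank(i);
        VideoConfigParams processedCfg;
        int32_t err = nodes[i]->InitNode(curSourceCfg, targetConfig, processedCfg);
        if (err != DCAMERA_OK) {
            return DCAMERA_INIT_ERR;
        }
        curSourceCfg = processedCfg;  // feed this node's output format to the next node
    }
    return DCAMERA_OK;
}

+int32_t 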
FpsControllerProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) { - if (targetConfig_.GetFrameRate() > MAX_TARGET_FRAME_RATE) { + if (targetConfig.GetFrameRate() > MAX_TARGET_FRAME_RATE) { DHLOGE("The target framerate : %d is greater than the max framerate : %d.", - targetConfig_.GetFrameRate(), MAX_TARGET_FRAME_RATE); + targetConfig.GetFrameRate(), MAX_TARGET_FRAME_RATE); return DCAMERA_BAD_TYPE; } + sourceConfig_ = sourceConfig; + targetConfig_ = targetConfig; targetFrameRate_ = targetConfig_.GetFrameRate(); + + processedConfig_ = sourceConfig; + processedConfig = processedConfig_; isFpsControllerProcess_ = true; return DCAMERA_OK; } @@ -215,7 +221,7 @@ float FpsControllerProcess::CalculateFrameRate(int64_t nowMs) const float msPerSecond = 1000; const int32_t minValidCalculatedFrameRatesNum = 2; - int32_t minIncomingFrameNum = static_cast(targetFrameRate_) / MIN_INCOME_FRAME_NUM_COEFFICIENT; + int32_t minIncomingFrameNum = targetFrameRate_ / MIN_INCOME_FRAME_NUM_COEFFICIENT; if (validFramesNumber > minIncomingFrameNum && validFramesNumber > minValidCalculatedFrameRatesNum) { int64_t validTotalTimeInterval = (nowMs - incomingFrameTimesMs_[num - 1]); if (validTotalTimeInterval < 0) { @@ -240,7 +246,7 @@ bool FpsControllerProcess::IsDropFrame(float incomingFps) return false; } const int32_t incomingFrmRate = static_cast(incomingFps); - if (incomingFrmRate > static_cast(targetFrameRate_)) { + if (incomingFrmRate > targetFrameRate_) { DHLOGD("incoming fps not more than targetFrameRate_, not drop"); return false; } @@ -252,7 +258,7 @@ bool FpsControllerProcess::IsDropFrame(float incomingFps) bool FpsControllerProcess::ReduceFrameRateByUniformStrategy(int32_t incomingFrmRate) { DHLOGD("Frame control, reduce frame rate by uniform rate strategy"); - if (incomingFrmRate > static_cast(targetFrameRate_)) { + if (incomingFrmRate > targetFrameRate_) { DHLOGD("incoming fps not more than targetFrameRate_, not drop"); return false; } @@ -295,7 +301,7 @@ bool FpsControllerProcess::ReduceFrameRateByUniformStrategy(int32_t incomingFrmR isDrop = true; keepMoreThanDoubleCount_++; } else { - frameRateOvershootMdf_ = overshoot % static_cast(targetFrameRate_); + frameRateOvershootMdf_ = overshoot % targetFrameRate_; isDrop = false; keepMoreThanDoubleCount_ = 0; } diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp index c76934da..9b065041 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp @@ -18,7 +18,6 @@ #include "distributed_hardware_log.h" #include "graphic_common_c.h" -#include "convert_nv12_to_nv21.h" #include "dcamera_utils_tools.h" #include "decode_surface_listener.h" #include "decode_video_callback.h" @@ -27,27 +26,34 @@ namespace OHOS { namespace DistributedHardware { DecodeDataProcess::~DecodeDataProcess() { - if (isDecoderProcess_) { + if (isDecoderProcess_.load()) { DHLOGD("~DecodeDataProcess : ReleaseProcessNode."); ReleaseProcessNode(); } } -int32_t DecodeDataProcess::InitNode() +int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) { DHLOGD("Init DCamera DecodeNode start."); - if (!(IsInDecoderRange(sourceConfig_) && 
IsInDecoderRange(targetConfig_))) { + if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) { DHLOGE("Source config or target config are invalid."); return DCAMERA_BAD_VALUE; } - if (!IsConvertible(sourceConfig_, targetConfig_)) { - DHLOGE("The DecodeNode can't convert %d to %d.", sourceConfig_.GetVideoCodecType(), + if (!IsConvertible(sourceConfig, targetConfig)) { + DHLOGE("The DecodeNode can't convert %d to %d.", sourceConfig.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); return DCAMERA_BAD_TYPE; } + + sourceConfig_ = sourceConfig; + targetConfig_ = targetConfig; if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) { DHLOGD("Disable DecodeNode. The target video codec type %d is the same as the source video codec type %d.", sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); + processedConfig_ = sourceConfig; + processedConfig = processedConfig_; + isDecoderProcess_.store(true); return DCAMERA_OK; } @@ -58,8 +64,9 @@ int32_t DecodeDataProcess::InitNode() ReleaseProcessNode(); return err; } - alignedHeight_ = GetAlignedHeight(static_cast(sourceConfig_.GetHeight())); - isDecoderProcess_ = true; + alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight()); + processedConfig = processedConfig_; + isDecoderProcess_.store(true); return DCAMERA_OK; } @@ -67,7 +74,7 @@ bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig) { return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH || curConfig.GetWidth() <= MAX_VIDEO_WIDTH || curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || curConfig.GetHeight() <= MAX_VIDEO_HEIGHT || - curConfig.GetFrameRate() <= MAX_FRAME_RATE); + curConfig.GetFrameRate() >= MIN_FRAME_RATE || curConfig.GetFrameRate() <= MAX_FRAME_RATE); } bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) @@ -90,10 +97,26 @@ void DecodeDataProcess::InitCodecEvent() int32_t DecodeDataProcess::InitDecoder() { DHLOGD("Init video decoder."); - int32_t err = InitDecoderMetadataFormat(); - if (err != DCAMERA_OK) { + int32_t ret = ConfigureVideoDecoder(); + if (ret != DCAMERA_OK) { DHLOGE("Init video decoder metadata format failed."); - return err; + return ret; + } + + ret = StartVideoDecoder(); + if (ret != DCAMERA_OK) { + DHLOGE("Start Video decoder failed."); + return ret; + } + return DCAMERA_OK; +} + +int32_t DecodeDataProcess::ConfigureVideoDecoder() +{ + int32_t ret = InitDecoderMetadataFormat(); + if (ret != DCAMERA_OK) { + DHLOGE("Init video decoder metadata format failed. Error code %d.", ret); + return ret; } videoDecoder_ = Media::VideoDecoderFactory::CreateByMime(processType_); @@ -102,32 +125,24 @@ int32_t DecodeDataProcess::InitDecoder() return DCAMERA_INIT_ERR; } decodeVideoCallback_ = std::make_shared(shared_from_this()); - int32_t retVal = videoDecoder_->SetCallback(decodeVideoCallback_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video decoder callback failed."); - return DCAMERA_INIT_ERR; - } - retVal = videoDecoder_->Configure(metadataFormat_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video decoder metadata format failed."); + ret = videoDecoder_->SetCallback(decodeVideoCallback_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video decoder callback failed. 
Error code %d.", ret); return DCAMERA_INIT_ERR; } - retVal = SetDecoderOutputSurface(); - if (retVal != DCAMERA_OK) { - DHLOGE("Set decoder output surface failed."); - return retVal; - } - retVal = videoDecoder_->Prepare(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video decoder prepare failed."); + ret = videoDecoder_->Configure(metadataFormat_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video decoder metadata format failed. Error code %d.", ret); return DCAMERA_INIT_ERR; } - retVal = videoDecoder_->Start(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video decoder start failed."); - return DCAMERA_INIT_ERR; + + ret = SetDecoderOutputSurface(); + if (ret != DCAMERA_OK) { + DHLOGE("Set decoder output surface failed. Error code %d.", ret); + return ret; } + return DCAMERA_OK; } @@ -147,11 +162,16 @@ int32_t DecodeDataProcess::InitDecoderMetadataFormat() DHLOGE("The current codec type does not support decoding."); return DCAMERA_NOT_FOUND; } + metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12); metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE); - metadataFormat_.PutIntValue("width", static_cast(sourceConfig_.GetWidth())); - metadataFormat_.PutIntValue("height", static_cast(sourceConfig_.GetHeight())); + metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth()); + metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight()); metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE); + + processedConfig_ = sourceConfig_; + processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC); + processedConfig_.SetVideoformat(Videoformat::NV12); return DCAMERA_OK; } @@ -197,6 +217,26 @@ int32_t DecodeDataProcess::SetDecoderOutputSurface() return DCAMERA_OK; } +int32_t DecodeDataProcess::StartVideoDecoder() +{ + if (videoDecoder_ == nullptr) { + DHLOGE("The video decoder does not exist before StartVideoDecoder."); + return DCAMERA_BAD_VALUE; + } + + int32_t ret = videoDecoder_->Prepare(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video decoder prepare failed. Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + ret = videoDecoder_->Start(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video decoder start failed. 
Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + return DCAMERA_OK; +} + int32_t DecodeDataProcess::StopVideoDecoder() { if (videoDecoder_ == nullptr) { @@ -276,7 +316,7 @@ void DecodeDataProcess::ReleaseCodecEvent() void DecodeDataProcess::ReleaseProcessNode() { DHLOGD("Start release [%d] node : DecodeNode.", nodeRank_); - isDecoderProcess_ = false; + isDecoderProcess_.store(false); if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); } @@ -320,7 +360,7 @@ int32_t DecodeDataProcess::ProcessData(std::vector>& DHLOGE("DecodeNode input buffer size %zu error.", inputBuffers[0]->Size()); return DCAMERA_MEMORY_OPT_ERROR; } - if (!isDecoderProcess_) { + if (!isDecoderProcess_.load()) { DHLOGE("Decoder node occurred error or start release."); return DCAMERA_DISABLE_PROCESS; } @@ -346,7 +386,7 @@ int32_t DecodeDataProcess::ProcessData(std::vector>& int32_t DecodeDataProcess::FeedDecoderInputBuffer() { DHLOGD("Feed decoder input buffer."); - while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_)) { + while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) { std::shared_ptr buffer = inputBuffersQueue_.front(); if (buffer == nullptr || availableInputIndexsQueue_.empty()) { DHLOGE("inputBuffersQueue size %zu, availableInputIndexsQueue size %zu.", @@ -463,21 +503,20 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 YUV_BYTES_PER_PIXEL / Y2UV_RATIO); std::shared_ptr bufferOutput = std::make_shared(yuvImageSize); uint8_t *addr = static_cast(surBuf->GetVirAddr()); - if (alignedWidth == static_cast(sourceConfig_.GetWidth()) && - alignedHeight == static_cast(sourceConfig_.GetHeight())) { + if (alignedWidth == sourceConfig_.GetWidth() && + alignedHeight == sourceConfig_.GetHeight()) { errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, yuvImageSize); if (err != EOK) { DHLOGE("memcpy_s surface buffer failed."); return; } } else { - ImageUnitInfo srcImgInfo = { sourceConfig_.GetVideoformat(), static_cast(sourceConfig_.GetWidth()), - static_cast(sourceConfig_.GetHeight()), alignedWidth, alignedHeight, - static_cast(alignedWidth * alignedHeight), surBuf->GetSize(), addr }; - ImageUnitInfo dstImgInfo = { sourceConfig_.GetVideoformat(), static_cast(sourceConfig_.GetWidth()), - static_cast(sourceConfig_.GetHeight()), static_cast(sourceConfig_.GetWidth()), - static_cast(sourceConfig_.GetHeight()), sourceConfig_.GetWidth() * sourceConfig_.GetHeight(), - bufferOutput->Size(), bufferOutput->Data() }; + ImageUnitInfo srcImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), + sourceConfig_.GetHeight(), alignedWidth, alignedHeight, static_cast(alignedWidth * alignedHeight), + surBuf->GetSize(), addr }; + ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), + sourceConfig_.GetHeight(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), + sourceConfig_.GetWidth() * sourceConfig_.GetHeight(), bufferOutput->Size(), bufferOutput->Data() }; int32_t retRow = CopyYUVPlaneByRow(srcImgInfo, dstImgInfo); if (retRow != DCAMERA_OK) { DHLOGE("memcpy_s surface buffer failed."); @@ -486,11 +525,11 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 } bufferOutput->SetInt64("timeUs", timeStampUs); - bufferOutput->SetInt32("Videoformat", static_cast(sourceConfig_.GetVideoformat())); - bufferOutput->SetInt32("alignedWidth", static_cast(sourceConfig_.GetWidth())); - bufferOutput->SetInt32("alignedHeight", static_cast(sourceConfig_.GetHeight())); - bufferOutput->SetInt32("width", 
static_cast(sourceConfig_.GetWidth())); - bufferOutput->SetInt32("height", static_cast(sourceConfig_.GetHeight())); + bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); + bufferOutput->SetInt32("alignedWidth", sourceConfig_.GetWidth()); + bufferOutput->SetInt32("alignedHeight", sourceConfig_.GetHeight()); + bufferOutput->SetInt32("width", sourceConfig_.GetWidth()); + bufferOutput->SetInt32("height", sourceConfig_.GetHeight()); PostOutputDataBuffers(bufferOutput); } @@ -654,15 +693,8 @@ void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev) return; } - std::shared_ptr colorConverter = std::make_shared(); - VideoConfigParams decodedConfig(VideoCodecType::NO_CODEC, Videoformat::NV12, sourceConfig_.GetFrameRate(), - sourceConfig_.GetWidth(), sourceConfig_.GetHeight()); - std::vector> nv21DataBuffers; - std::shared_ptr nv21Image = colorConverter->ProcessData( - receivedCodecPacket->GetDataBuffers()[0], decodedConfig, targetConfig_); - nv21DataBuffers.push_back(nv21Image); - - DecodeDone(nv21DataBuffers); + std::vector> yuvDataBuffers = receivedCodecPacket->GetDataBuffers(); + DecodeDone(yuvDataBuffers); break; } case VideoCodecAction::ACTION_ONCE_AGAIN: @@ -678,7 +710,7 @@ void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev) void DecodeDataProcess::OnError() { DHLOGD("DecodeDataProcess : OnError."); - isDecoderProcess_ = false; + isDecoderProcess_.store(false); videoDecoder_->Stop(); std::shared_ptr targetPipelineSource = callbackPipelineSource_.lock(); if (targetPipelineSource == nullptr) { @@ -712,7 +744,7 @@ void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format) void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const Media::AVCodecBufferInfo& info, const Media::AVCodecBufferFlag& flag) { - if (!isDecoderProcess_) { + if (!isDecoderProcess_.load()) { DHLOGE("Decoder node occurred error or start release."); return; } diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp index bdafe6c5..3134f382 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp @@ -18,7 +18,6 @@ #include "distributed_hardware_log.h" #include "graphic_common_c.h" -#include "convert_nv12_to_nv21.h" #include "dcamera_utils_tools.h" #include "decode_surface_listener.h" #include "decode_video_callback.h" @@ -27,27 +26,34 @@ namespace OHOS { namespace DistributedHardware { DecodeDataProcess::~DecodeDataProcess() { - if (isDecoderProcess_) { + if (isDecoderProcess_.load()) { DHLOGD("~DecodeDataProcess : ReleaseProcessNode."); ReleaseProcessNode(); } } -int32_t DecodeDataProcess::InitNode() +int32_t DecodeDataProcess::DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) { DHLOGD("Common Init DCamera DecodeNode start."); - if (!(IsInDecoderRange(sourceConfig_) && IsInDecoderRange(targetConfig_))) { + if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) { DHLOGE("Common Source config or target config are invalid."); return DCAMERA_BAD_VALUE; } - if (!IsConvertible(sourceConfig_, targetConfig_)) { + if (!IsConvertible(sourceConfig, targetConfig)) { DHLOGE("Common The DecodeNode can't convert %d to %d.", sourceConfig_.GetVideoCodecType(), 
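This signature change is the heart of the ColorConvertNode upgrade: a node no longer reads its input format from members set elsewhere, it receives the pipeline's source and target configs and reports the format it actually emits through the processedConfig out-parameter. A minimal sketch of the chaining this enables on the pipeline side; the loop and the pipelineSourceConfig/dataProcessNodes names are illustrative assumptions, not code from this patch set:

    VideoConfigParams curInput = pipelineSourceConfig;   // hypothetical pipeline-side glue
    for (auto& node : dataProcessNodes) {
        VideoConfigParams processedConfig;
        int32_t ret = node->InitNode(curInput, pipelineTargetConfig, processedConfig);
        if (ret != DCAMERA_OK) {
            return ret;
        }
        curInput = processedConfig;   // e.g. the decode node reports NO_CODEC + NV12 here
    }

Each downstream node is therefore initialized against what its predecessor really produces, which is why later hunks in this series compare sourceConfig_ against processedConfig_ instead of targetConfig_.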
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
index bdafe6c5..3134f382 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
@@ -18,7 +18,6 @@
 #include "distributed_hardware_log.h"
 #include "graphic_common_c.h"
 
-#include "convert_nv12_to_nv21.h"
 #include "dcamera_utils_tools.h"
 #include "decode_surface_listener.h"
 #include "decode_video_callback.h"
@@ -27,27 +26,34 @@ namespace OHOS {
 namespace DistributedHardware {
 DecodeDataProcess::~DecodeDataProcess()
 {
-    if (isDecoderProcess_) {
+    if (isDecoderProcess_.load()) {
         DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
         ReleaseProcessNode();
     }
 }
 
-int32_t DecodeDataProcess::InitNode()
+int32_t DecodeDataProcess::DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
+    VideoConfigParams& processedConfig)
 {
     DHLOGD("Common Init DCamera DecodeNode start.");
-    if (!(IsInDecoderRange(sourceConfig_) && IsInDecoderRange(targetConfig_))) {
+    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
         DHLOGE("Common Source config or target config are invalid.");
         return DCAMERA_BAD_VALUE;
     }
-    if (!IsConvertible(sourceConfig_, targetConfig_)) {
+    if (!IsConvertible(sourceConfig, targetConfig)) {
         DHLOGE("Common The DecodeNode can't convert %d to %d.", sourceConfig_.GetVideoCodecType(),
             targetConfig_.GetVideoCodecType());
         return DCAMERA_BAD_TYPE;
     }
+
+    sourceConfig_ = sourceConfig;
+    targetConfig_ = targetConfig;
     if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
         DHLOGD("Disable DecodeNode. The target video codec type %d is the same as the source video codec type %d.",
             sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+        processedConfig_ = sourceConfig;
+        processedConfig = processedConfig_;
+        isDecoderProcess_.store(true);
         return DCAMERA_OK;
     }
 
@@ -58,8 +64,9 @@ int32_t DecodeDataProcess::InitNode()
         ReleaseProcessNode();
         return err;
     }
-    alignedHeight_ = GetAlignedHeight(static_cast<int32_t>(sourceConfig_.GetHeight()));
-    isDecoderProcess_ = true;
+    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
+    processedConfig = processedConfig_;
+    isDecoderProcess_.store(true);
     return DCAMERA_OK;
 }
 
@@ -67,7 +74,7 @@ bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
 {
     return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
         curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
-        curConfig.GetFrameRate() <= MAX_FRAME_RATE);
+        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
 }
 
 bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
@@ -89,11 +96,27 @@ void DecodeDataProcess::InitCodecEvent()
 int32_t DecodeDataProcess::InitDecoder()
 {
-    DHLOGD("Common Init video decoder.");
-    int32_t err = InitDecoderMetadataFormat();
-    if (err != DCAMERA_OK) {
-        DHLOGE("Init video decoder metadata format fail.");
-        return err;
+    DHLOGD("Init video decoder.");
+    int32_t ret = ConfigureVideoDecoder();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Init video decoder metadata format failed.");
+        return ret;
+    }
+
+    ret = StartVideoDecoder();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Start video decoder failed.");
+        return ret;
+    }
+    return DCAMERA_OK;
+}
+
+int32_t DecodeDataProcess::ConfigureVideoDecoder()
+{
+    int32_t ret = InitDecoderMetadataFormat();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Init video decoder metadata format failed. Error code %d.", ret);
+        return ret;
     }
 
     videoDecoder_ = Media::VideoDecoderFactory::CreateByMime(processType_);
@@ -102,32 +125,24 @@ int32_t DecodeDataProcess::InitDecoder()
         return DCAMERA_INIT_ERR;
     }
     decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
-    int32_t retVal = videoDecoder_->SetCallback(decodeVideoCallback_);
-    if (retVal != Media::MediaServiceErrCode::MSERR_OK) {
-        DHLOGE("Set video decoder callback failed.");
-        return DCAMERA_INIT_ERR;
-    }
-    retVal = videoDecoder_->Configure(metadataFormat_);
-    if (retVal != Media::MediaServiceErrCode::MSERR_OK) {
-        DHLOGE("Set video decoder metadata format failed.");
+    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
+    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
+        DHLOGE("Set video decoder callback failed. Error code %d.", ret);
         return DCAMERA_INIT_ERR;
     }
-    retVal = SetDecoderOutputSurface();
-    if (retVal != DCAMERA_OK) {
-        DHLOGE("Set decoder output surface fail.");
-        return retVal;
-    }
-    retVal = videoDecoder_->Prepare();
-    if (retVal != Media::MediaServiceErrCode::MSERR_OK) {
-        DHLOGE("Video decoder prepare failed.");
+    ret = videoDecoder_->Configure(metadataFormat_);
+    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
+        DHLOGE("Set video decoder metadata format failed. Error code %d.", ret);
         return DCAMERA_INIT_ERR;
     }
Error code %d.", ret); return DCAMERA_INIT_ERR; } - retVal = videoDecoder_->Start(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video decoder start failed."); - return DCAMERA_INIT_ERR; + + ret = SetDecoderOutputSurface(); + if (ret != DCAMERA_OK) { + DHLOGE("Set decoder output surface failed. Error code %d.", ret); + return ret; } + return DCAMERA_OK; } @@ -139,9 +154,12 @@ int32_t DecodeDataProcess::InitDecoderMetadataFormat() metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::RGBA); metadataFormat_.PutIntValue("max_input_size", MAX_RGB32_BUFFER_SIZE); - metadataFormat_.PutIntValue("width", static_cast(sourceConfig_.GetWidth())); - metadataFormat_.PutIntValue("height", static_cast(sourceConfig_.GetHeight())); + metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth()); + metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight()); metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE); + + processedConfig_ = sourceConfig_; + processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC); return DCAMERA_OK; } @@ -187,6 +205,27 @@ int32_t DecodeDataProcess::SetDecoderOutputSurface() return DCAMERA_OK; } +int32_t DecodeDataProcess::StartVideoDecoder() +{ + if (videoDecoder_ == nullptr) { + DHLOGE("The video decoder does not exist before StartVideoDecoder."); + return DCAMERA_BAD_VALUE; + } + + int32_t ret = videoDecoder_->Prepare(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video decoder prepare failed. Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + ret = videoDecoder_->Start(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video decoder start failed. Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + return DCAMERA_OK; +} + + int32_t DecodeDataProcess::StopVideoDecoder() { if (videoDecoder_ == nullptr) { @@ -266,7 +305,7 @@ void DecodeDataProcess::ReleaseCodecEvent() void DecodeDataProcess::ReleaseProcessNode() { DHLOGD("Start release [%d] node : DecodeNode.", nodeRank_); - isDecoderProcess_ = false; + isDecoderProcess_.store(false); if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); } @@ -310,7 +349,7 @@ int32_t DecodeDataProcess::ProcessData(std::vector>& DHLOGE("DecodeNode input buffer size %d error.", inputBuffers[0]->Size()); return DCAMERA_MEMORY_OPT_ERROR; } - if (!isDecoderProcess_) { + if (!isDecoderProcess_.load()) { DHLOGE("Decoder node occurred error or start release."); return DCAMERA_DISABLE_PROCESS; } @@ -336,7 +375,7 @@ int32_t DecodeDataProcess::ProcessData(std::vector>& int32_t DecodeDataProcess::FeedDecoderInputBuffer() { DHLOGD("Feed decoder input buffer."); - while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_)) { + while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) { std::shared_ptr buffer = inputBuffersQueue_.front(); if (buffer == nullptr || availableInputIndexsQueue_.empty()) { DHLOGE("inputBuffersQueue size %d, availableInputIndexsQueue size %d.", @@ -459,11 +498,11 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 return; } bufferOutput->SetInt64("timeUs", timeStampUs); - bufferOutput->SetInt32("Videoformat", static_cast(sourceConfig_.GetVideoformat())); - bufferOutput->SetInt32("alignedWidth", static_cast(sourceConfig_.GetWidth())); - bufferOutput->SetInt32("alignedHeight", static_cast(sourceConfig_.GetHeight())); - bufferOutput->SetInt32("width", static_cast(sourceConfig_.GetWidth())); - bufferOutput->SetInt32("height", static_cast(sourceConfig_.GetHeight())); + 
bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); + bufferOutput->SetInt32("alignedWidth", sourceConfig_.GetWidth()); + bufferOutput->SetInt32("alignedHeight", sourceConfig_.GetHeight()); + bufferOutput->SetInt32("width", sourceConfig_.GetWidth()); + bufferOutput->SetInt32("height", sourceConfig_.GetHeight()); PostOutputDataBuffers(bufferOutput); } @@ -555,7 +594,7 @@ void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev) void DecodeDataProcess::OnError() { DHLOGD("DecodeDataProcess : OnError."); - isDecoderProcess_ = false; + isDecoderProcess_.store(false); videoDecoder_->Stop(); std::shared_ptr targetPipelineSource = callbackPipelineSource_.lock(); if (targetPipelineSource == nullptr) { @@ -589,7 +628,7 @@ void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format) void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const Media::AVCodecBufferInfo& info, const Media::AVCodecBufferFlag& flag) { - if (!isDecoderProcess_) { + if (!isDecoderProcess_.load()) { DHLOGE("Decoder node occurred error or start release."); return; } diff --git a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp index ed2d5ca5..0f27223b 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp @@ -45,27 +45,34 @@ const std::map EncodeDataProcess::ENCODER_BITRATE_TABLE = { EncodeDataProcess::~EncodeDataProcess() { - if (isEncoderProcess_) { + if (isEncoderProcess_.load()) { DHLOGD("~EncodeDataProcess : ReleaseProcessNode."); ReleaseProcessNode(); } } -int32_t EncodeDataProcess::InitNode() +int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig, + VideoConfigParams& processedConfig) { DHLOGD("Init DCamera EncodeNode start."); - if (!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_))) { + if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) { DHLOGE("Source config or target config are invalid."); return DCAMERA_BAD_VALUE; } - if (!IsConvertible(sourceConfig_, targetConfig_)) { + if (!IsConvertible(sourceConfig, targetConfig)) { DHLOGE("The EncodeNode cannot convert source VideoCodecType %d to target VideoCodecType %d.", - sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); + sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType()); return DCAMERA_BAD_TYPE; } + + sourceConfig_ = sourceConfig; + targetConfig_ = targetConfig; if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) { DHLOGD("Disable EncodeNode. 
The target VideoCodecType %d is the same as the source VideoCodecType %d.", sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); + processedConfig_ = sourceConfig; + processedConfig = processedConfig_; + isEncoderProcess_.store(true); return DCAMERA_OK; } @@ -75,7 +82,8 @@ int32_t EncodeDataProcess::InitNode() ReleaseProcessNode(); return err; } - isEncoderProcess_ = true; + processedConfig = processedConfig_; + isEncoderProcess_.store(true); return DCAMERA_OK; } @@ -83,7 +91,7 @@ bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig) { return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH || curConfig.GetWidth() <= MAX_VIDEO_WIDTH || curConfig.GetHeight() >= MIN_VIDEO_HEIGHT || curConfig.GetHeight() <= MAX_VIDEO_HEIGHT || - curConfig.GetFrameRate() <= MAX_FRAME_RATE); + curConfig.GetFrameRate() >= MIN_FRAME_RATE || curConfig.GetFrameRate() <= MAX_FRAME_RATE); } bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig) @@ -95,15 +103,32 @@ bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, con int32_t EncodeDataProcess::InitEncoder() { DHLOGD("Init video encoder."); - int32_t err = InitEncoderMetadataFormat(); - if (err != DCAMERA_OK) { - DHLOGE("Init video encoder metadata format failed."); - return err; + int32_t ret = ConfigureVideoEncoder(); + if (ret != DCAMERA_OK) { + DHLOGE("Init video encoder metadata format failed. Error code %d.", ret); + return ret; } - err = InitEncoderBitrateFormat(); - if (err != DCAMERA_OK) { - DHLOGE("Init video encoder bitrate format failed."); - return err; + + ret = StartVideoEncoder(); + if (ret != DCAMERA_OK) { + DHLOGE("Start Video encoder failed."); + return ret; + } + + return DCAMERA_OK; +} + +int32_t EncodeDataProcess::ConfigureVideoEncoder() +{ + int32_t ret = InitEncoderMetadataFormat(); + if (ret != DCAMERA_OK) { + DHLOGE("Init video encoder metadata format failed. Error code %d.", ret); + return ret; + } + ret = InitEncoderBitrateFormat(); + if (ret != DCAMERA_OK) { + DHLOGE("Init video encoder bitrate format failed. Error code %d.", ret); + return ret; } videoEncoder_ = Media::VideoEncoderFactory::CreateByMime(processType_); @@ -112,52 +137,53 @@ int32_t EncodeDataProcess::InitEncoder() return DCAMERA_INIT_ERR; } encodeVideoCallback_ = std::make_shared(shared_from_this()); - int32_t retVal = videoEncoder_->SetCallback(encodeVideoCallback_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video encoder callback failed."); + ret = videoEncoder_->SetCallback(encodeVideoCallback_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video encoder callback failed. Error code %d.", ret); return DCAMERA_INIT_ERR; } - retVal = videoEncoder_->Configure(metadataFormat_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video encoder metadata format failed."); + + ret = videoEncoder_->Configure(metadataFormat_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video encoder metadata format failed. 
Error code %d.", ret); return DCAMERA_INIT_ERR; } + encodeProducerSurface_ = videoEncoder_->CreateInputSurface(); if (encodeProducerSurface_ == nullptr) { DHLOGE("Get video encoder producer surface failed."); return DCAMERA_INIT_ERR; } - retVal = videoEncoder_->Prepare(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video encoder prepare failed."); - return DCAMERA_INIT_ERR; - } - retVal = videoEncoder_->Start(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video encoder start failed."); - return DCAMERA_INIT_ERR; - } + return DCAMERA_OK; } int32_t EncodeDataProcess::InitEncoderMetadataFormat() { DHLOGD("Init video encoder metadata format."); + + processedConfig_ = sourceConfig_; + switch (targetConfig_.GetVideoCodecType()) { case VideoCodecType::CODEC_H264: processType_ = "video/avc"; metadataFormat_.PutStringValue("codec_mime", processType_); metadataFormat_.PutIntValue("codec_profile", Media::AVCProfile::AVC_PROFILE_BASELINE); + + processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264); break; case VideoCodecType::CODEC_H265: processType_ = "video/hevc"; metadataFormat_.PutStringValue("codec_mime", processType_); metadataFormat_.PutIntValue("codec_profile", Media::HEVCProfile::HEVC_PROFILE_MAIN); + + processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265); break; default: DHLOGE("The current codec type does not support encoding."); return DCAMERA_NOT_FOUND; } + switch (sourceConfig_.GetVideoformat()) { case Videoformat::YUVI420: metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::YUVI420); @@ -214,6 +240,26 @@ int32_t EncodeDataProcess::InitEncoderBitrateFormat() return DCAMERA_OK; } +int32_t EncodeDataProcess::StartVideoEncoder() +{ + if (videoEncoder_ == nullptr) { + DHLOGE("The video encoder does not exist before StopVideoEncoder."); + return DCAMERA_BAD_VALUE; + } + + int32_t ret = videoEncoder_->Prepare(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video encoder prepare failed. Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + ret = videoEncoder_->Start(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video encoder start failed. 
Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + return DCAMERA_OK; +} + int32_t EncodeDataProcess::StopVideoEncoder() { if (videoEncoder_ == nullptr) { @@ -266,7 +312,7 @@ void EncodeDataProcess::ReleaseVideoEncoder() void EncodeDataProcess::ReleaseProcessNode() { DHLOGD("Start release [%d] node : EncodeNode.", nodeRank_); - isEncoderProcess_ = false; + isEncoderProcess_.store(false); if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); } @@ -301,7 +347,7 @@ int32_t EncodeDataProcess::ProcessData(std::vector>& DHLOGE("EncodeNode input buffer size %d error.", inputBuffers[0]->Size()); return DCAMERA_MEMORY_OPT_ERROR; } - if (!isEncoderProcess_) { + if (!isEncoderProcess_.load()) { DHLOGE("EncodeNode occurred error or start release."); return DCAMERA_DISABLE_PROCESS; } @@ -311,7 +357,7 @@ int32_t EncodeDataProcess::ProcessData(std::vector>& return err; } - IncreaseWaitDecodeCnt(); + IncreaseWaitEncodeCnt(); return DCAMERA_OK; } @@ -358,8 +404,8 @@ int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr& i sptr EncodeDataProcess::GetEncoderInputSurfaceBuffer() { BufferRequestConfig requestConfig; - requestConfig.width = static_cast(sourceConfig_.GetWidth()); - requestConfig.height = static_cast(sourceConfig_.GetHeight()); + requestConfig.width = sourceConfig_.GetWidth(); + requestConfig.height = sourceConfig_.GetHeight(); requestConfig.usage = HBM_USE_CPU_READ | HBM_USE_CPU_WRITE | HBM_USE_MEM_DMA; requestConfig.timeout = 0; requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT; @@ -400,7 +446,7 @@ int64_t EncodeDataProcess::GetEncoderTimeStamp() return TimeIntervalStampUs; } -void EncodeDataProcess::IncreaseWaitDecodeCnt() +void EncodeDataProcess::IncreaseWaitEncodeCnt() { std::lock_guard lck(mtxHoldCount_); if (inputTimeStampUs_ == 0) { @@ -411,7 +457,7 @@ void EncodeDataProcess::IncreaseWaitDecodeCnt() DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_); } -void EncodeDataProcess::ReduceWaitDecodeCnt() +void EncodeDataProcess::ReduceWaitEncodeCnt() { std::lock_guard lck(mtxHoldCount_); if (waitEncoderOutputCount_ <= 0) { @@ -484,7 +530,7 @@ int32_t EncodeDataProcess::EncodeDone(std::vector>& void EncodeDataProcess::OnError() { DHLOGD("EncodeDataProcess : OnError."); - isEncoderProcess_ = false; + isEncoderProcess_.store(false); videoEncoder_->Flush(); videoEncoder_->Stop(); std::shared_ptr targetPipelineSink = callbackPipelineSink_.lock(); @@ -512,7 +558,7 @@ void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format) void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBufferInfo info, Media::AVCodecBufferFlag flag) { - if (!isEncoderProcess_) { + if (!isEncoderProcess_.load()) { DHLOGE("EncodeNode occurred error or start release."); return; } @@ -523,7 +569,7 @@ void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBu DHLOGE("Get encode output Buffer failed."); return; } - ReduceWaitDecodeCnt(); + ReduceWaitEncodeCnt(); if (videoEncoder_ == nullptr) { DHLOGE("The video encoder does not exist before release output buffer index."); diff --git a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp index 919724f5..00393792 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp +++ 
+++ b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp
@@ -45,37 +45,45 @@ const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
 EncodeDataProcess::~EncodeDataProcess()
 {
-    if (isEncoderProcess_) {
+    if (isEncoderProcess_.load()) {
         DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
         ReleaseProcessNode();
     }
 }
 
-int32_t EncodeDataProcess::InitNode()
+int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
+    VideoConfigParams& processedConfig)
 {
     DHLOGD("Common Init DCamera EncodeNode start.");
-    if (!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_))) {
+    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
         DHLOGE("Common Source config or target config are invalid.");
         return DCAMERA_BAD_VALUE;
     }
-    if (!IsConvertible(sourceConfig_, targetConfig_)) {
+    if (!IsConvertible(sourceConfig, targetConfig)) {
         DHLOGE("Common The EncodeNode cannot convert source VideoCodecType %d to target VideoCodecType %d.",
-            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
         return DCAMERA_BAD_TYPE;
     }
+
+    sourceConfig_ = sourceConfig;
+    targetConfig_ = targetConfig;
     if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
         DHLOGD("Common Disable EncodeNode. The target VideoCodecType %d is the same as the source VideoCodecType %d.",
             sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+        processedConfig_ = sourceConfig;
+        processedConfig = processedConfig_;
+        isEncoderProcess_.store(true);
         return DCAMERA_OK;
     }
 
     int32_t err = InitEncoder();
     if (err != DCAMERA_OK) {
-        DHLOGE("Common Init video encoder fail.");
+        DHLOGE("Common Init video encoder failed.");
         ReleaseProcessNode();
         return err;
     }
-    isEncoderProcess_ = true;
+    processedConfig = processedConfig_;
+    isEncoderProcess_.store(true);
     return DCAMERA_OK;
 }
 
@@ -83,7 +91,7 @@ bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
 {
     return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
         curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
-        curConfig.GetFrameRate() <= MAX_FRAME_RATE);
+        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
 }
 
 bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
@@ -95,15 +103,32 @@ int32_t EncodeDataProcess::InitEncoder()
 {
     DHLOGD("Common Init video encoder.");
-    int32_t err = InitEncoderMetadataFormat();
-    if (err != DCAMERA_OK) {
-        DHLOGE("Common Init video encoder metadata format fail.");
-        return err;
+    int32_t ret = ConfigureVideoEncoder();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Init video encoder metadata format failed. Error code %d.", ret);
+        return ret;
     }
-    err = InitEncoderBitrateFormat();
-    if (err != DCAMERA_OK) {
-        DHLOGE("Common Init video encoder bitrate format fail.");
-        return err;
+
+    ret = StartVideoEncoder();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Start video encoder failed.");
+        return ret;
+    }
+
+    return DCAMERA_OK;
+}
+
+int32_t EncodeDataProcess::ConfigureVideoEncoder()
+{
+    int32_t ret = InitEncoderMetadataFormat();
+    if (ret != DCAMERA_OK) {
+        DHLOGE("Init video encoder metadata format failed. Error code %d.", ret);
+        return ret;
+    }
Error code %d.", ret); + return ret; + } + ret = InitEncoderBitrateFormat(); + if (ret != DCAMERA_OK) { + DHLOGE("Init video encoder bitrate format failed. Error code %d.", ret); + return ret; } videoEncoder_ = Media::VideoEncoderFactory::CreateByMime(processType_); @@ -112,45 +137,43 @@ int32_t EncodeDataProcess::InitEncoder() return DCAMERA_INIT_ERR; } encodeVideoCallback_ = std::make_shared(shared_from_this()); - int32_t retVal = videoEncoder_->SetCallback(encodeVideoCallback_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video encoder callback failed."); + ret = videoEncoder_->SetCallback(encodeVideoCallback_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video encoder callback failed. Error code %d.", ret); return DCAMERA_INIT_ERR; } - retVal = videoEncoder_->Configure(metadataFormat_); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Set video encoder metadata format failed."); + + ret = videoEncoder_->Configure(metadataFormat_); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Set video encoder metadata format failed. Error code %d.", ret); return DCAMERA_INIT_ERR; } + encodeProducerSurface_ = videoEncoder_->CreateInputSurface(); if (encodeProducerSurface_ == nullptr) { DHLOGE("Get video encoder producer surface failed."); return DCAMERA_INIT_ERR; } - retVal = videoEncoder_->Prepare(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video encoder prepare failed."); - return DCAMERA_INIT_ERR; - } - retVal = videoEncoder_->Start(); - if (retVal != Media::MediaServiceErrCode::MSERR_OK) { - DHLOGE("Video encoder start failed."); - return DCAMERA_INIT_ERR; - } + return DCAMERA_OK; } int32_t EncodeDataProcess::InitEncoderMetadataFormat() { DHLOGD("Common Init video encoder metadata format."); + + processedConfig_ = sourceConfig_; + processType_ = "video/mp4v-es"; metadataFormat_.PutStringValue("codec_mime", processType_); metadataFormat_.PutIntValue("codec_profile", Media::MPEG4Profile::MPEG4_PROFILE_ADVANCED_CODING); + processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_MPEG4); + metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::RGBA); metadataFormat_.PutLongValue("max_input_size", NORM_RGB32_BUFFER_SIZE); - metadataFormat_.PutIntValue("width", static_cast(sourceConfig_.GetWidth())); - metadataFormat_.PutIntValue("height", static_cast(sourceConfig_.GetHeight())); + metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth()); + metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight()); metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE); return DCAMERA_OK; } @@ -189,6 +212,26 @@ int32_t EncodeDataProcess::InitEncoderBitrateFormat() return DCAMERA_OK; } +int32_t EncodeDataProcess::StartVideoEncoder() +{ + if (videoEncoder_ == nullptr) { + DHLOGE("The video encoder does not exist before StopVideoEncoder."); + return DCAMERA_BAD_VALUE; + } + + int32_t ret = videoEncoder_->Prepare(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video encoder prepare failed. Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + ret = videoEncoder_->Start(); + if (ret != Media::MediaServiceErrCode::MSERR_OK) { + DHLOGE("Video encoder start failed. 
Error code %d.", ret); + return DCAMERA_INIT_ERR; + } + return DCAMERA_OK; +} + int32_t EncodeDataProcess::StopVideoEncoder() { if (videoEncoder_ == nullptr) { @@ -241,7 +284,7 @@ void EncodeDataProcess::ReleaseVideoEncoder() void EncodeDataProcess::ReleaseProcessNode() { DHLOGD("Start release [%d] node : EncodeNode.", nodeRank_); - isEncoderProcess_ = false; + isEncoderProcess_.store(false); if (nextDataProcess_ != nullptr) { nextDataProcess_->ReleaseProcessNode(); } @@ -277,7 +320,7 @@ int32_t EncodeDataProcess::ProcessData(std::vector>& DHLOGE("EncodeNode input buffer size %d error.", inputBuffers[0]->Size()); return DCAMERA_MEMORY_OPT_ERROR; } - if (!isEncoderProcess_) { + if (!isEncoderProcess_.load()) { DHLOGE("EncodeNode occurred error or start release."); return DCAMERA_DISABLE_PROCESS; } @@ -287,7 +330,7 @@ int32_t EncodeDataProcess::ProcessData(std::vector>& return err; } - IncreaseWaitDecodeCnt(); + IncreaseWaitEncodeCnt(); return DCAMERA_OK; } @@ -334,8 +377,8 @@ int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr& i sptr EncodeDataProcess::GetEncoderInputSurfaceBuffer() { BufferRequestConfig requestConfig; - requestConfig.width = static_cast(sourceConfig_.GetWidth()); - requestConfig.height = static_cast(sourceConfig_.GetHeight()); + requestConfig.width = sourceConfig_.GetWidth(); + requestConfig.height = sourceConfig_.GetHeight(); requestConfig.usage = HBM_USE_CPU_READ | HBM_USE_CPU_WRITE | HBM_USE_MEM_DMA; requestConfig.timeout = 0; requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT; @@ -356,7 +399,7 @@ int64_t EncodeDataProcess::GetEncoderTimeStamp() return nowTimeUs; } -void EncodeDataProcess::IncreaseWaitDecodeCnt() +void EncodeDataProcess::IncreaseWaitEncodeCnt() { std::lock_guard lck(mtxHoldCount_); if (inputTimeStampUs_ == 0) { @@ -367,7 +410,7 @@ void EncodeDataProcess::IncreaseWaitDecodeCnt() DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_); } -void EncodeDataProcess::ReduceWaitDecodeCnt() +void EncodeDataProcess::ReduceWaitEncodeCnt() { std::lock_guard lck(mtxHoldCount_); if (waitEncoderOutputCount_ <= 0) { @@ -440,7 +483,7 @@ int32_t EncodeDataProcess::EncodeDone(std::vector>& void EncodeDataProcess::OnError() { DHLOGD("EncodeDataProcess : OnError."); - isEncoderProcess_ = false; + isEncoderProcess_.store(false); videoEncoder_->Flush(); videoEncoder_->Stop(); std::shared_ptr targetPipelineSink = callbackPipelineSink_.lock(); @@ -468,7 +511,7 @@ void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format) void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBufferInfo info, Media::AVCodecBufferFlag flag) { - if (!isEncoderProcess_) { + if (!isEncoderProcess_.load()) { DHLOGE("EncodeNode occurred error or start release."); return; } @@ -479,7 +522,7 @@ void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBu DHLOGE("Get encode output Buffer fail."); return; } - ReduceWaitDecodeCnt(); + ReduceWaitEncodeCnt(); if (videoEncoder_ == nullptr) { DHLOGE("The video encoder does not exist before release output buffer index."); diff --git a/services/data_process/src/utils/image_common_type.cpp b/services/data_process/src/utils/image_common_type.cpp index 9cff6d05..2ee4ab29 100644 --- a/services/data_process/src/utils/image_common_type.cpp +++ b/services/data_process/src/utils/image_common_type.cpp @@ -27,12 +27,12 @@ void VideoConfigParams::SetVideoformat(Videoformat pixelFormat) pixelFormat_ = pixelFormat; } -void VideoConfigParams::SetFrameRate(uint32_t frameRate) 
+void VideoConfigParams::SetFrameRate(int32_t frameRate)
 {
     frameRate_ = frameRate;
 }
 
-void VideoConfigParams::SetWidthAndHeight(uint32_t width, uint32_t height)
+void VideoConfigParams::SetWidthAndHeight(int32_t width, int32_t height)
 {
     width_ = width;
     height_ = height;
@@ -48,17 +48,17 @@ Videoformat VideoConfigParams::GetVideoformat() const
     return pixelFormat_;
 }
 
-uint32_t VideoConfigParams::GetFrameRate() const
+int32_t VideoConfigParams::GetFrameRate() const
 {
     return frameRate_;
 }
 
-uint32_t VideoConfigParams::GetWidth() const
+int32_t VideoConfigParams::GetWidth() const
 {
     return width_;
 }
 
-uint32_t VideoConfigParams::GetHeight() const
+int32_t VideoConfigParams::GetHeight() const
 {
     return height_;
 }
--
Gitee


From 56a24d7a09d3e994abff279abbc506393d129686 Mon Sep 17 00:00:00 2001
From: t00605578
Date: Fri, 6 May 2022 11:40:15 +0800
Subject: [PATCH 2/4] Adapts to the ColorConvertNode upgrade

Signed-off-by: t00605578
---
 .../colorspace_conversion/color_format_process.cpp          | 8 ++++----
 .../multimedia_codec/decoder/decode_data_process.cpp        | 4 ++--
 .../multimedia_codec/decoder/decode_data_process_common.cpp | 4 ++--
 .../multimedia_codec/encoder/encode_data_process.cpp        | 4 ++--
 .../multimedia_codec/encoder/encode_data_process_common.cpp | 4 ++--
 5 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
index d35718d3..dd76be78 100644
--- a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
+++ b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
@@ -78,9 +78,9 @@ int32_t ColorFormatProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
         return DCAMERA_BAD_VALUE;
     }
 
-    if (sourceConfig_.GetVideoformat() == targetConfig_.GetVideoformat()) {
+    if (sourceConfig_.GetVideoformat() == processedConfig_.GetVideoformat()) {
         DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
-            sourceConfig_.GetVideoformat(), targetConfig_.GetVideoformat());
+            sourceConfig_.GetVideoformat(), processedConfig_.GetVideoformat());
         return ColorFormatDone(inputBuffers);
     }
 
@@ -98,7 +98,7 @@ int32_t ColorFormatProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
     size_t dstBufsize = sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO;
     std::shared_ptr<DataBuffer> dstBuf = std::make_shared<DataBuffer>(dstBufsize);
-    ImageUnitInfo dstImgInfo = { targetConfig_.GetVideoformat(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
+    ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
         sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetWidth() * sourceConfig_.GetHeight(),
         dstBuf->Size(), dstBuf->Data() };
     if (ColorConvertByColorFormat(srcImgInfo, dstImgInfo) != DCAMERA_OK) {
         DHLOGE("ColorConvertProcess : ColorConvertByColorFormat failed.");
         return DCAMERA_BAD_OPERATE;
     }
 
@@ -107,7 +107,7 @@ int32_t ColorFormatProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
     dstBuf->SetInt64("timeUs", timeStamp);
-    dstBuf->SetInt32("Videoformat", static_cast<int32_t>(targetConfig_.GetVideoformat()));
+    dstBuf->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
     dstBuf->SetInt32("alignedWidth", sourceConfig_.GetWidth());
     dstBuf->SetInt32("alignedHeight", sourceConfig_.GetHeight());
     dstBuf->SetInt32("width", sourceConfig_.GetWidth());
     dstBuf->SetInt32("height", sourceConfig_.GetHeight());
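For reviewers unfamiliar with the two formats: NV12 and NV21 share an identical Y plane and differ only in the byte order of the interleaved chroma plane, so the ColorConvertNV12ToNV21() case that ColorConvertByColorFormat() dispatches to reduces to a plane copy plus a per-pair U/V swap. A minimal sketch of that kernel, assuming tightly packed buffers (the real node must additionally honor the alignedWidth/alignedHeight strides carried in ImageUnitInfo):

    // Sketch only: NV12 -> NV21 for a tightly packed width x height frame.
    void ConvertNV12ToNV21Packed(const uint8_t *src, uint8_t *dst, int32_t width, int32_t height)
    {
        int32_t ySize = width * height;
        (void)memcpy_s(dst, ySize, src, ySize);   // Y plane is byte-identical
        const uint8_t *srcUV = src + ySize;       // NV12 chroma: U then V, interleaved
        uint8_t *dstVU = dst + ySize;             // NV21 chroma: V then U, interleaved
        for (int32_t i = 0; i < ySize / 2; i += 2) {
            dstVU[i] = srcUV[i + 1];
            dstVU[i + 1] = srcUV[i];
        }
    }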
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp
index 9b065041..9405fa0e 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp
@@ -342,9 +342,9 @@ int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
         DHLOGE("The input data buffers is empty.");
         return DCAMERA_BAD_VALUE;
     }
-    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
+    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
         DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
-            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
         return DecodeDone(inputBuffers);
     }
 
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
index 3134f382..ea882095 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
@@ -331,9 +331,9 @@ int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
         DHLOGE("The input data buffers is empty.");
         return DCAMERA_BAD_VALUE;
     }
-    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
+    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
         DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
-            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
         return DecodeDone(inputBuffers);
     }
 
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp
index 0f27223b..abfc0e6e 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process.cpp
@@ -333,9 +333,9 @@ int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
         DHLOGE("The input data buffers is empty.");
         return DCAMERA_BAD_VALUE;
     }
-    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
+    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
         DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
-            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
+            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
         return EncodeDone(inputBuffers);
     }
 
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp
index 00393792..193cf2c4 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/encoder/encode_data_process_common.cpp
@@ -305,9 +305,9 @@ int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
         DHLOGE("The input data buffers is empty.");
         return DCAMERA_BAD_VALUE;
     }
-    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
+    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.", - sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType()); + sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType()); return EncodeDone(inputBuffers); } -- Gitee From bf54bb3aa8ba5e8612b4949a07dd23950ceae1c5 Mon Sep 17 00:00:00 2001 From: t00605578 Date: Fri, 6 May 2022 14:18:10 +0800 Subject: [PATCH 3/4] Adapts to the ColorConvertNode upgrade Signed-off-by: t00605578 --- .../colorspace_conversion/color_format_process.cpp | 14 +++++++------- .../decoder/decode_data_process.cpp | 14 +++++++------- .../decoder/decode_data_process_common.cpp | 8 ++++---- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp index dd76be78..7a83245a 100644 --- a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp +++ b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp @@ -98,9 +98,9 @@ int32_t ColorFormatProcess::ProcessData(std::vector> size_t dstBufsize = sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO; std::shared_ptr dstBuf = std::make_shared(dstBufsize); - ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), - sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), sourceConfig_.GetWidth() * sourceConfig_.GetHeight(), - dstBuf->Size(), dstBuf->Data() }; + ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(), + processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), + processedConfig_.GetWidth() * processedConfig_.GetHeight(), dstBuf->Size(), dstBuf->Data() }; if (ColorConvertByColorFormat(srcImgInfo, dstImgInfo) != DCAMERA_OK) { DHLOGE("ColorConvertProcess : ColorConvertByColorFormat failed."); return DCAMERA_BAD_OPERATE; @@ -108,10 +108,10 @@ int32_t ColorFormatProcess::ProcessData(std::vector> dstBuf->SetInt64("timeUs", timeStamp); dstBuf->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); - dstBuf->SetInt32("alignedWidth", sourceConfig_.GetWidth()); - dstBuf->SetInt32("alignedHeight", sourceConfig_.GetHeight()); - dstBuf->SetInt32("width", sourceConfig_.GetWidth()); - dstBuf->SetInt32("height", sourceConfig_.GetHeight()); + dstBuf->SetInt32("alignedWidth", processedConfig_.GetWidth()); + dstBuf->SetInt32("alignedHeight", processedConfig_.GetHeight()); + dstBuf->SetInt32("width", processedConfig_.GetWidth()); + dstBuf->SetInt32("height", processedConfig_.GetHeight()); std::vector> outputBuffers; outputBuffers.push_back(dstBuf); diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp index 9405fa0e..9286455e 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process.cpp @@ -514,9 +514,9 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 ImageUnitInfo srcImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), alignedWidth, alignedHeight, static_cast(alignedWidth * alignedHeight), surBuf->GetSize(), addr }; - 
ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(), - sourceConfig_.GetHeight(), sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), - sourceConfig_.GetWidth() * sourceConfig_.GetHeight(), bufferOutput->Size(), bufferOutput->Data() }; + ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(), + processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(), + processedConfig_.GetWidth() * processedConfig_.GetHeight(), bufferOutput->Size(), bufferOutput->Data() }; int32_t retRow = CopyYUVPlaneByRow(srcImgInfo, dstImgInfo); if (retRow != DCAMERA_OK) { DHLOGE("memcpy_s surface buffer failed."); @@ -526,10 +526,10 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 bufferOutput->SetInt64("timeUs", timeStampUs); bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); - bufferOutput->SetInt32("alignedWidth", sourceConfig_.GetWidth()); - bufferOutput->SetInt32("alignedHeight", sourceConfig_.GetHeight()); - bufferOutput->SetInt32("width", sourceConfig_.GetWidth()); - bufferOutput->SetInt32("height", sourceConfig_.GetHeight()); + bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth()); + bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight()); + bufferOutput->SetInt32("width", processedConfig_.GetWidth()); + bufferOutput->SetInt32("height", processedConfig_.GetHeight()); PostOutputDataBuffers(bufferOutput); } diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp index ea882095..a559c0e7 100644 --- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp +++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp @@ -499,10 +499,10 @@ void DecodeDataProcess::CopyDecodedImage(const sptr& surBuf, int6 } bufferOutput->SetInt64("timeUs", timeStampUs); bufferOutput->SetInt32("Videoformat", static_cast(processedConfig_.GetVideoformat())); - bufferOutput->SetInt32("alignedWidth", sourceConfig_.GetWidth()); - bufferOutput->SetInt32("alignedHeight", sourceConfig_.GetHeight()); - bufferOutput->SetInt32("width", sourceConfig_.GetWidth()); - bufferOutput->SetInt32("height", sourceConfig_.GetHeight()); + bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth()); + bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight()); + bufferOutput->SetInt32("width", processedConfig_.GetWidth()); + bufferOutput->SetInt32("height", processedConfig_.GetHeight()); PostOutputDataBuffers(bufferOutput); } -- Gitee From 5a904c6ae6f89a7a7eae4907423aca4b26f3d030 Mon Sep 17 00:00:00 2001 From: t00605578 Date: Fri, 6 May 2022 14:55:28 +0800 Subject: [PATCH 4/4] Adapts to the ColorConvertNode upgrade - 1 Signed-off-by: t00605578 --- .../pipeline_node/colorspace_conversion/color_format_process.h | 2 +- .../colorspace_conversion/color_format_process.cpp | 2 +- .../src/pipeline_node/fpscontroller/fps_controller_process.cpp | 2 +- .../multimedia_codec/decoder/decode_data_process_common.cpp | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h b/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h index 19a981fe..4cf3cc91 100644 --- 
--- a/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h
+++ b/services/data_process/include/pipeline_node/colorspace_conversion/color_format_process.h
@@ -25,7 +25,7 @@
 namespace OHOS {
 namespace DistributedHardware {
-class ColorFormatProcess : public AbstractDataProcess{
+class ColorFormatProcess : public AbstractDataProcess {
 public:
     explicit ColorFormatProcess(const std::weak_ptr<DCameraPipelineSource>& callbackPipSource)
         : callbackPipelineSource_(callbackPipSource) {}
diff --git a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
index 7a83245a..c7e113c4 100644
--- a/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
+++ b/services/data_process/src/pipeline_node/colorspace_conversion/color_format_process.cpp
@@ -425,7 +425,7 @@ int32_t ColorFormatProcess::ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
     int32_t ret;
     switch (srcImgInfo.colorFormat) {
         case Videoformat::NV12:
-            switch(dstImgInfo.colorFormat) {
+            switch (dstImgInfo.colorFormat) {
                 case Videoformat::NV21:
                     ret = ColorConvertNV12ToNV21(srcImgInfo, dstImgInfo);
                     break;
diff --git a/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp b/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
index e660d7b0..9017b797 100644
--- a/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
+++ b/services/data_process/src/pipeline_node/fpscontroller/fps_controller_process.cpp
@@ -30,7 +30,7 @@ FpsControllerProcess::~FpsControllerProcess()
 }
 
 int32_t FpsControllerProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
-                                       VideoConfigParams& processedConfig)
+    VideoConfigParams& processedConfig)
 {
     if (targetConfig.GetFrameRate() > MAX_TARGET_FRAME_RATE) {
         DHLOGE("The target framerate : %d is greater than the max framerate : %d.",
diff --git a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
index a559c0e7..e5815ec6 100644
--- a/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
+++ b/services/data_process/src/pipeline_node/multimedia_codec/decoder/decode_data_process_common.cpp
@@ -32,7 +32,7 @@ DecodeDataProcess::~DecodeDataProcess()
     }
 }
 
-int32_t DecodeDataProcess::DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
+int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
     VideoConfigParams& processedConfig)
 {
     DHLOGD("Common Init DCamera DecodeNode start.");
--
Gitee
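A closing note on the alignedWidth/alignedHeight plumbing that recurs through all four patches: decoder surface buffers pad every row out to the aligned stride, so whenever the aligned size differs from the real frame size, planes must be copied row by row, which is the job of CopyYUVPlaneByRow(). A sketch of the Y-plane half of that work; the field names follow the ImageUnitInfo initializer order used in the hunks above and should be treated as assumptions about the struct layout:

    // Sketch only: copy a stride-padded Y plane into a tightly packed one.
    int32_t CopyYPlaneByRow(const ImageUnitInfo& src, const ImageUnitInfo& dst)
    {
        for (int32_t row = 0; row < dst.height; row++) {
            errno_t err = memcpy_s(dst.imgData + row * dst.width, dst.width,
                src.imgData + row * src.alignedWidth, dst.width);  // drop the per-row padding
            if (err != EOK) {
                return DCAMERA_MEMORY_OPT_ERROR;
            }
        }
        return DCAMERA_OK;
    }

The interleaved UV plane follows the same pattern at half the height, which is also why the decode hunks compute the chroma offset from alignedWidth * alignedHeight rather than width * height.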