diff --git a/entry/src/main/cpp/capbilities/Muxer.cpp b/entry/src/main/cpp/capbilities/Muxer.cpp
index 1bcd8b362798ee13a9ff609aebdc1f0600394504..528fddb87d7cc95d21d6189d82795393e3e88cbb 100644
--- a/entry/src/main/cpp/capbilities/Muxer.cpp
+++ b/entry/src/main/cpp/capbilities/Muxer.cpp
@@ -23,24 +23,21 @@ namespace {
 constexpr int32_t CAMERA_ANGLE = 90;
 }
 
-Muxer::~Muxer()
-{
-    Release();
-}
+Muxer::~Muxer() { Release(); }
 
-int32_t Muxer::Create(int32_t fd)
-{
+// [Start format_path]
+// Create a muxer instance and set the encapsulation format to MP4
+int32_t Muxer::Create(int32_t fd) {
     muxer_ = OH_AVMuxer_Create(fd, AV_OUTPUT_FORMAT_MPEG_4);
     CHECK_AND_RETURN_RET_LOG(muxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Muxer create failed, fd: %{public}d", fd);
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t Muxer::Config(SampleInfo &sampleInfo)
-{
+int32_t Muxer::Config(SampleInfo &sampleInfo) {
     CHECK_AND_RETURN_RET_LOG(muxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Muxer is null");
 
-    OH_AVFormat *formatVideo = OH_AVFormat_CreateVideoFormat(sampleInfo.videoCodecMime.data(),
-        sampleInfo.videoWidth, sampleInfo.videoHeight);
+    OH_AVFormat *formatVideo =
+        OH_AVFormat_CreateVideoFormat(sampleInfo.videoCodecMime.data(), sampleInfo.videoWidth, sampleInfo.videoHeight);
     CHECK_AND_RETURN_RET_LOG(formatVideo != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Create video format failed");
 
     OH_AVFormat_SetDoubleValue(formatVideo, OH_MD_KEY_FRAME_RATE, sampleInfo.frameRate);
@@ -54,17 +51,17 @@ int32_t Muxer::Config(SampleInfo &sampleInfo)
         OH_AVFormat_SetIntValue(formatVideo, OH_MD_KEY_TRANSFER_CHARACTERISTICS, sampleInfo.transfer);
         OH_AVFormat_SetIntValue(formatVideo, OH_MD_KEY_MATRIX_COEFFICIENTS, sampleInfo.matrix);
     }
-
-    int32_t ret = OH_AVMuxer_AddTrack(muxer_, &videoTrackId_, formatVideo);
+
+    int32_t ret = OH_AVMuxer_AddTrack(muxer_, &videoTrackId_, formatVideo);
     OH_AVFormat_Destroy(formatVideo);
     formatVideo = nullptr;
     OH_AVMuxer_SetRotation(muxer_, CAMERA_ANGLE);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "AddTrack failed");
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End format_path]
 
-int32_t Muxer::Start()
-{
+int32_t Muxer::Start() {
     CHECK_AND_RETURN_RET_LOG(muxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Muxer is null");
 
     int ret = OH_AVMuxer_Start(muxer_);
@@ -72,11 +69,10 @@ int32_t Muxer::Start()
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t Muxer::WriteSample(OH_AVBuffer *buffer, OH_AVCodecBufferAttr &attr)
-{
+int32_t Muxer::WriteSample(OH_AVBuffer *buffer, OH_AVCodecBufferAttr &attr) {
     CHECK_AND_RETURN_RET_LOG(muxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Muxer is null");
     CHECK_AND_RETURN_RET_LOG(buffer != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Get a empty buffer");
-
+
     int32_t ret = OH_AVBuffer_SetBufferAttr(buffer, &attr);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "SetBufferAttr failed");
 
@@ -85,8 +81,7 @@ int32_t Muxer::WriteSample(OH_AVBuffer *buffer, OH_AVCodecBufferAttr &attr)
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t Muxer::Release()
-{
+int32_t Muxer::Release() {
     if (muxer_ != nullptr) {
         OH_AVMuxer_Destroy(muxer_);
         muxer_ = nullptr;
diff --git a/entry/src/main/cpp/capbilities/VideoDecoder.cpp b/entry/src/main/cpp/capbilities/VideoDecoder.cpp
index 7cfd1672bfe966c586e7e6ed244fba6bd5a8671c..bb3f060eeb8ddb9c7904492f47118e6cb2a50f70 100644
--- a/entry/src/main/cpp/capbilities/VideoDecoder.cpp
+++ b/entry/src/main/cpp/capbilities/VideoDecoder.cpp
@@ -22,31 +22,36 @@ namespace {
 constexpr int LIMIT_LOGD_FREQUENCY = 50;
 } // namespace
 
-VideoDecoder::~VideoDecoder()
-{
-    Release();
-}
+VideoDecoder::~VideoDecoder() { Release(); }
 
-int32_t VideoDecoder::Create(const std::string &videoCodecMime)
-{
+// Development using the system codec AVCodec
+// [Start create_decoder]
+// Create a decoder instance object
+int32_t VideoDecoder::Create(const std::string &videoCodecMime) {
     decoder_ = OH_VideoDecoder_CreateByMime(videoCodecMime.c_str());
     CHECK_AND_RETURN_RET_LOG(decoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Create failed");
     return AVCODEC_SAMPLE_ERR_OK;
 }
-
-int32_t VideoDecoder::SetCallback(CodecUserData *codecUserData)
-{
+// [End create_decoder]
+
+// [Start set_callback]
+// Setting the callback function
+int32_t VideoDecoder::SetCallback(CodecUserData *codecUserData) {
     int32_t ret = AV_ERR_OK;
     ret = OH_VideoDecoder_RegisterCallback(decoder_,
-        {SampleCallback::OnCodecError, SampleCallback::OnCodecFormatChange,
-         SampleCallback::OnNeedInputBuffer, SampleCallback::OnNewOutputBuffer}, codecUserData);
+                                           {SampleCallback::OnCodecError, SampleCallback::OnCodecFormatChange,
+                                            SampleCallback::OnNeedInputBuffer, SampleCallback::OnNewOutputBuffer},
+                                           codecUserData);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Set callback failed, ret: %{public}d", ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End set_callback]
 
-int32_t VideoDecoder::Configure(const SampleInfo &sampleInfo)
-{
+// [Start set_decoder]
+// Configure the decoder
+int32_t VideoDecoder::Configure(const SampleInfo &sampleInfo) {
+    // [StartExclude set_decoder]
     OH_AVFormat *format = OH_AVFormat_Create();
     CHECK_AND_RETURN_RET_LOG(format != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "AVFormat create failed");
@@ -60,34 +65,40 @@ int32_t VideoDecoder::Configure(const SampleInfo &sampleInfo)
     AVCODEC_SAMPLE_LOGI("%{public}d*%{public}d, %{public}.1ffps", sampleInfo.videoWidth, sampleInfo.videoHeight,
                         sampleInfo.frameRate);
     AVCODEC_SAMPLE_LOGI("====== VideoDecoder config ======");
-
+    // [EndExclude set_decoder]
     int ret = OH_VideoDecoder_Configure(decoder_, format);
+    // [StartExclude set_decoder]
     OH_AVFormat_Destroy(format);
     format = nullptr;
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Config failed, ret: %{public}d", ret);
 
     return AVCODEC_SAMPLE_ERR_OK;
+    // [EndExclude set_decoder]
 }
+// [End set_decoder]
 
-int32_t VideoDecoder::Config(const SampleInfo &sampleInfo, CodecUserData *codecUserData)
-{
+// [Start decoder_ready]
+int32_t VideoDecoder::Config(const SampleInfo &sampleInfo, CodecUserData *codecUserData) {
+    // [StartExclude decoder_ready]
     CHECK_AND_RETURN_RET_LOG(decoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Decoder is null");
     CHECK_AND_RETURN_RET_LOG(codecUserData != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Invalid param: codecUserData");
 
     // Configure video decoder
     int32_t ret = Configure(sampleInfo);
     CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Configure failed");
-
+    // [EndExclude decoder_ready]
     // SetSurface from video decoder
     if (sampleInfo.window != nullptr) {
         int ret = OH_VideoDecoder_SetSurface(decoder_, sampleInfo.window);
         CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK && sampleInfo.window, AVCODEC_SAMPLE_ERR_ERROR,
-            "Set surface failed, ret: %{public}d", ret);
+                                 "Set surface failed, ret: %{public}d", ret);
     }
-
+
+    // [StartExclude decoder_ready]
     // SetCallback for video decoder
     ret = SetCallback(codecUserData);
     CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR,
                              "Set callback failed, ret: %{public}d", ret);
+    // [EndExclude decoder_ready]
     // Prepare video decoder
     {
@@ -97,28 +108,34 @@ int32_t VideoDecoder::Config(const SampleInfo &sampleInfo, CodecU
 
     return AVCODEC_SAMPLE_ERR_OK;
 }
-
-int32_t VideoDecoder::Start()
-{
+// [End decoder_ready]
+
+// [Start start_decoder]
+// Start the decoder
+int32_t VideoDecoder::Start() {
     CHECK_AND_RETURN_RET_LOG(decoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Decoder is null");
 
     int ret = OH_VideoDecoder_Start(decoder_);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Start failed, ret: %{public}d", ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End start_decoder]
 
-int32_t VideoDecoder::PushInputBuffer(CodecBufferInfo &info)
-{
+// [Start write_decoded]
+// Write the stream to be decoded
+int32_t VideoDecoder::PushInputBuffer(CodecBufferInfo &info) {
     CHECK_AND_RETURN_RET_LOG(decoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Decoder is null");
     int32_t ret = OH_VideoDecoder_PushInputBuffer(decoder_, info.bufferIndex);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Push input data failed");
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End write_decoded]
 
-int32_t VideoDecoder::FreeOutputBuffer(uint32_t bufferIndex, bool render)
-{
+// [Start release_decoded]
+// Render and release decoded frames
+int32_t VideoDecoder::FreeOutputBuffer(uint32_t bufferIndex, bool render) {
     CHECK_AND_RETURN_RET_LOG(decoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Decoder is null");
-
+
     int32_t ret = AVCODEC_SAMPLE_ERR_OK;
     if (render) {
         ret = OH_VideoDecoder_RenderOutputBuffer(decoder_, bufferIndex);
@@ -128,9 +145,11 @@ int32_t VideoDecoder::FreeOutputBuffer(uint32_t bufferIndex, bool render)
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Free output data failed");
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End release_decoded]
 
-int32_t VideoDecoder::Release()
-{
+// [Start release_resources]
+// Destroy the decoder instance and release resources
+int32_t VideoDecoder::Release() {
     if (decoder_ != nullptr) {
         OH_VideoDecoder_Flush(decoder_);
         OH_VideoDecoder_Stop(decoder_);
@@ -138,4 +157,5 @@ int32_t VideoDecoder::Release()
         decoder_ = nullptr;
     }
     return AVCODEC_SAMPLE_ERR_OK;
-}
\ No newline at end of file
+}
+// [End release_resources]
\ No newline at end of file
diff --git a/entry/src/main/cpp/capbilities/VideoEncoder.cpp b/entry/src/main/cpp/capbilities/VideoEncoder.cpp
index cd1c5f242d18a4c80b829120fef336e8923d74bb..b6e6758722fc92b9d10478cbeb18022b7e61773b 100644
--- a/entry/src/main/cpp/capbilities/VideoEncoder.cpp
+++ b/entry/src/main/cpp/capbilities/VideoEncoder.cpp
@@ -18,20 +18,18 @@
 #undef LOG_TAG
 #define LOG_TAG "VideoEncoder"
 
-VideoEncoder::~VideoEncoder()
-{
-    Release();
-}
+VideoEncoder::~VideoEncoder() { Release(); }
 
-int32_t VideoEncoder::Create(const std::string &videoCodecMime)
-{
+// [Start encoder_initialization]
+// Create a video encoder and initialize it
+int32_t VideoEncoder::Create(const std::string &videoCodecMime) {
     encoder_ = OH_VideoEncoder_CreateByMime(videoCodecMime.c_str());
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Create failed");
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End encoder_initialization]
 
-int32_t VideoEncoder::Config(SampleInfo &sampleInfo, CodecUserData *codecUserData)
-{
+int32_t VideoEncoder::Config(SampleInfo &sampleInfo, CodecUserData *codecUserData) {
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Encoder is null");
     CHECK_AND_RETURN_RET_LOG(codecUserData != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Invalid param: codecUserData");
 
@@ -42,7 +40,7 @@ int32_t VideoEncoder::Config(SampleInfo &sampleInfo, CodecUserDat
     // GetSurface from video encoder
     ret = GetSurface(sampleInfo);
     CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Get surface failed");
-
+
     // SetCallback for video encoder
     ret = SetCallback(codecUserData);
     CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR,
@@ -55,37 +53,36 @@ int32_t VideoEncoder::Config(SampleInfo &sampleInfo, CodecUserDat
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::Start()
-{
+// [Start start_encoder]
+// Start the encoder
+int32_t VideoEncoder::Start() {
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Encoder is null");
 
     int ret = OH_VideoEncoder_Start(encoder_);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Start failed, ret: %{public}d", ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
+// [End start_encoder]
 
-int32_t VideoEncoder::FreeOutputBuffer(uint32_t bufferIndex)
-{
+int32_t VideoEncoder::FreeOutputBuffer(uint32_t bufferIndex) {
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Encoder is null");
 
     int32_t ret = OH_VideoEncoder_FreeOutputBuffer(encoder_, bufferIndex);
-    CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR,
-        "Free output data failed, ret: %{public}d", ret);
+    CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Free output data failed, ret: %{public}d",
+                             ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::NotifyEndOfStream()
-{
+int32_t VideoEncoder::NotifyEndOfStream() {
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Encoder is null");
 
     int32_t ret = OH_VideoEncoder_NotifyEndOfStream(encoder_);
-    CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR,
-        "Notify end of stream failed, ret: %{public}d", ret);
+    CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Notify end of stream failed, ret: %{public}d",
+                             ret);
    return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::Stop()
-{
+int32_t VideoEncoder::Stop() {
     CHECK_AND_RETURN_RET_LOG(encoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Encoder is null");
 
     int ret = OH_VideoEncoder_Flush(encoder_);
@@ -96,8 +93,7 @@ int32_t VideoEncoder::Stop()
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::Release()
-{
+int32_t VideoEncoder::Release() {
     if (encoder_ != nullptr) {
         OH_VideoEncoder_Destroy(encoder_);
         encoder_ = nullptr;
@@ -105,19 +101,20 @@ int32_t VideoEncoder::Release()
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::SetCallback(CodecUserData *codecUserData)
-{
-    int32_t ret = OH_VideoEncoder_RegisterCallback(encoder_,
-        {SampleCallback::OnCodecError, SampleCallback::OnCodecFormatChange,
-         SampleCallback::OnNeedInputBuffer, SampleCallback::OnNewOutputBuffer},
-        codecUserData);
+int32_t VideoEncoder::SetCallback(CodecUserData *codecUserData) {
+    int32_t ret =
+        OH_VideoEncoder_RegisterCallback(encoder_,
+                                         {SampleCallback::OnCodecError, SampleCallback::OnCodecFormatChange,
+                                          SampleCallback::OnNeedInputBuffer, SampleCallback::OnNewOutputBuffer},
+                                         codecUserData);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Set callback failed, ret: %{public}d", ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
 
-int32_t VideoEncoder::Configure(const SampleInfo &sampleInfo)
-{
+// Camera+AVCodec
+// [Start camera_AVCodec]
+int32_t VideoEncoder::Configure(const SampleInfo &sampleInfo) {
     OH_AVFormat *format = OH_AVFormat_Create();
     CHECK_AND_RETURN_RET_LOG(format != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "AVFormat create failed");
@@ -128,6 +125,8 @@ int32_t VideoEncoder::Configure(const SampleInfo &sampleInfo)
     OH_AVFormat_SetIntValue(format, OH_MD_KEY_VIDEO_ENCODE_BITRATE_MODE, sampleInfo.bitrateMode);
     OH_AVFormat_SetLongValue(format, OH_MD_KEY_BITRATE, sampleInfo.bitrate);
     OH_AVFormat_SetIntValue(format, OH_MD_KEY_PROFILE, sampleInfo.hevcProfile);
+    // [EndExclude camera_AVCodec]
+    // Set HDR Vivid-related parameters
     if (sampleInfo.isHDRVivid) {
         OH_AVFormat_SetIntValue(format, OH_MD_KEY_I_FRAME_INTERVAL, sampleInfo.iFrameInterval);
         OH_AVFormat_SetIntValue(format, OH_MD_KEY_RANGE_FLAG, sampleInfo.rangFlag);
@@ -135,25 +134,30 @@
         OH_AVFormat_SetIntValue(format, OH_MD_KEY_TRANSFER_CHARACTERISTICS, sampleInfo.transfer);
         OH_AVFormat_SetIntValue(format, OH_MD_KEY_MATRIX_COEFFICIENTS, sampleInfo.matrix);
     }
+    // [StartExclude camera_AVCodec]
     AVCODEC_SAMPLE_LOGI("====== VideoEncoder config ======");
-    AVCODEC_SAMPLE_LOGI("%{public}d*%{public}d, %{public}.1ffps",
-        sampleInfo.videoWidth, sampleInfo.videoHeight, sampleInfo.frameRate);
+    AVCODEC_SAMPLE_LOGI("%{public}d*%{public}d, %{public}.1ffps", sampleInfo.videoWidth, sampleInfo.videoHeight,
+                        sampleInfo.frameRate);
     // 1024: ratio of kbps to bps
-    AVCODEC_SAMPLE_LOGI("BitRate Mode: %{public}d, BitRate: %{public}" PRId64 "kbps",
-        sampleInfo.bitrateMode, sampleInfo.bitrate / 1024);
+    AVCODEC_SAMPLE_LOGI("BitRate Mode: %{public}d, BitRate: %{public}" PRId64 "kbps", sampleInfo.bitrateMode,
+                        sampleInfo.bitrate / 1024);
     AVCODEC_SAMPLE_LOGI("====== VideoEncoder config ======");
+    // [Start set_encoder]
+    // Configure the encoder
     int ret = OH_VideoEncoder_Configure(encoder_, format);
+    // [End set_encoder]
     OH_AVFormat_Destroy(format);
     format = nullptr;
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "Config failed, ret: %{public}d", ret);
 
     return AVCODEC_SAMPLE_ERR_OK;
+    // [EndExclude camera_AVCodec]
 }
+// [End camera_AVCodec]
 
-int32_t VideoEncoder::GetSurface(SampleInfo &sampleInfo)
-{
+int32_t VideoEncoder::GetSurface(SampleInfo &sampleInfo) {
     int32_t ret = OH_VideoEncoder_GetSurface(encoder_, &sampleInfo.window);
     CHECK_AND_RETURN_RET_LOG(ret == AV_ERR_OK && sampleInfo.window, AVCODEC_SAMPLE_ERR_ERROR,
-        "Get surface failed, ret: %{public}d", ret);
+                             "Get surface failed, ret: %{public}d", ret);
     return AVCODEC_SAMPLE_ERR_OK;
 }
\ No newline at end of file
diff --git a/entry/src/main/ets/common/utils/CameraCheck.ets b/entry/src/main/ets/common/utils/CameraCheck.ets
index 8b1384a0009b6ded4d6fc5ba977d9e31312cf96e..8320765db763d7438defb36c9153e40f83b5c429 100644
--- a/entry/src/main/ets/common/utils/CameraCheck.ets
+++ b/entry/src/main/ets/common/utils/CameraCheck.ets
@@ -70,35 +70,45 @@ export function previewProfileCameraCheck(cameraManager: camera.CameraManager,
   return previewProfile;
 }
 
+// [Start create_video_output2]
 export function videoProfileCheck(cameraManager: camera.CameraManager,
   cameraData: CameraDataModel): undefined | camera.VideoProfile {
   let cameraDevices = cameraManager.getSupportedCameras();
+  // [StartExclude create_video_output2]
   if (cameraDevices !== undefined && cameraDevices.length <= 0) {
     Logger.error(TAG, 'cameraManager.getSupportedCameras error!');
     return;
   }
+  // [EndExclude create_video_output2]
   let profiles: camera.CameraOutputCapability = cameraManager.getSupportedOutputCapability(cameraDevices[0],
     camera.SceneMode.NORMAL_VIDEO);
+  // [StartExclude create_video_output2]
   if (!profiles) {
     Logger.error(TAG, 'cameraManager.getSupportedOutputCapability error!');
     return;
   }
+  // [EndExclude create_video_output2]
   let videoProfiles: Array<camera.VideoProfile> = profiles.videoProfiles;
+  // [StartExclude create_video_output2]
   if (!videoProfiles) {
     Logger.error(TAG, 'Get videoProfiles error!');
     return;
   }
+  // [EndExclude create_video_output2]
   let videoProfile: undefined | camera.VideoProfile = videoProfiles.find((profile: camera.VideoProfile) => {
     if (cameraData.isHDRVivid) {
+      // [StartExclude create_video_output2]
       if (cameraData.frameRate === Const.FRAMERATE_VIDEO_30FPS) {
+      // [EndExclude create_video_output2]
         return profile.size.width === cameraData.cameraWidth &&
           profile.size.height === cameraData.cameraHeight &&
          profile.format === camera.CameraFormat.CAMERA_FORMAT_YCBCR_P010 &&
          profile.frameRateRange.min === 1 &&
          profile.frameRateRange.max === 30;
+      // [StartExclude create_video_output2]
       } else {
        return profile.size.width === cameraData.cameraWidth &&
          profile.size.height === cameraData.cameraHeight &&
@@ -106,7 +116,9 @@ export function videoProfileCheck(cameraManager: camera.CameraManager,
          profile.frameRateRange.min === cameraData.frameRate &&
          profile.frameRateRange.max === cameraData.frameRate;
       }
+      // [EndExclude create_video_output2]
     } else {
+      // [StartExclude create_video_output2]
       if (cameraData.frameRate === Const.FRAMERATE_VIDEO_30FPS) {
        return profile.size.width === cameraData.cameraWidth &&
          profile.size.height === cameraData.cameraHeight &&
@@ -120,7 +132,9 @@ export function videoProfileCheck(cameraManager: camera.CameraManager,
          profile.frameRateRange.min === cameraData.frameRate &&
          profile.frameRateRange.max === cameraData.frameRate;
       }
+      // [EndExclude create_video_output2]
     }
   });
   return videoProfile;
-}
\ No newline at end of file
+}
+// [End create_video_output2]
\ No newline at end of file
diff --git a/entry/src/main/ets/pages/Recorder.ets b/entry/src/main/ets/pages/Recorder.ets
index 54aaacb31f01fcb1fc151cdaf981844263625d32..79c030d2990bd48560afa249598a6bc1a13caf1d 100644
--- a/entry/src/main/ets/pages/Recorder.ets
+++ b/entry/src/main/ets/pages/Recorder.ets
@@ -60,6 +60,7 @@ async function releaseCamera(): Promise<void> {
   videoSession.release();
 }
 
+// [Start set_video_color]
 function isVideoStabilizationModeSupported(session: camera.VideoSession, mode: camera.VideoStabilizationMode): boolean {
   let isSupported: boolean = false;
   try {
@@ -78,6 +79,7 @@ function setVideoStabilizationMode(session: camera.VideoSession): boolean {
   let isSupported: boolean = isVideoStabilizationModeSupported(session, mode);
   if (isSupported) {
     Logger.info(TAG, `setVideoStabilizationMode: ${mode}`);
+    // Set video stabilization
     session.setVideoStabilizationMode(mode);
     let activeVideoStabilizationMode = session.getActiveVideoStabilizationMode();
     Logger.info(TAG, `activeVideoStabilizationMode: ${activeVideoStabilizationMode}`);
@@ -98,6 +100,7 @@ function getSupportedColorSpaces(session: camera.VideoSession): Array
   let colorSpaces: Array = getSupportedColorSpaces(session);
@@ -111,6 +114,7 @@ function setColorSpaceBeforeCommitConfig(session: camera.VideoSession, isHdr: nu
     Logger.info(TAG, `colorSpace: ${colorSpace} is not support`);
   }
 }
+// [End set_video_color]
 
 @Entry
 @Component
@@ -191,18 +195,23 @@ struct Recorder {
       return;
     }
 
+    // [Start create_video_output3]
    let videoProfile: undefined | camera.VideoProfile = videoProfileCheck(cameraManager, params);
    if (!videoProfile) {
      Logger.error(TAG, 'videoProfile is not found!');
      return;
    }
-    // The preview stream of XComponent.
+    // [StartExclude create_video_output3]
+    // The preview stream of XComponent.
+    // [Start camera_conversation]
    let XComponentPreviewProfile: camera.Profile | undefined = previewProfileCameraCheck(cameraManager, params);
    if (XComponentPreviewProfile === undefined) {
      Logger.error(TAG, 'XComponentPreviewProfile is not found');
      return;
    }
+    // [StartExclude camera_conversation]
+    // [EndExclude create_video_output3]
 
    // Create the encoder output object
    encoderVideoOutput = cameraManager.createVideoOutput(videoProfile, params.surfaceId);
@@ -211,6 +220,7 @@ struct Recorder {
      return;
    }
    Logger.info(TAG, 'encoderVideoOutput success');
+    // [End create_video_output3]
 
    // Create a preview stream output object
    XComponentPreviewOutput = cameraManager.createPreviewOutput(XComponentPreviewProfile, this.XComponentSurfaceId);
@@ -238,57 +248,71 @@ struct Recorder {
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to open cameraInput. error: ${JSON.stringify(err)}`);
    }
+    // [EndExclude camera_conversation]
 
-    // Session flow.
+    // Create a session flow
    try {
      videoSession = cameraManager.createSession(camera.SceneMode.NORMAL_VIDEO) as camera.VideoSession;
    } catch (error) {
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to create the session instance. error: ${JSON.stringify(err)}`);
    }
+    // [StartExclude camera_conversation]
    if (videoSession === undefined) {
      Logger.error(TAG, 'videoSession is undefined');
      return;
    }
+    // [EndExclude camera_conversation]
 
    // Start Configuring the session.
    try {
      videoSession.beginConfig();
    } catch (error) {
+      // [StartExclude camera_conversation]
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to beginConfig. error: ${JSON.stringify(err)}`);
+      // [EndExclude camera_conversation]
    }
+    // [StartExclude camera_conversation]
 
    // Add CameraInput to the session.
    try {
      videoSession.addInput(cameraInput);
    } catch (error) {
+      // DocsDot
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to add cameraInput. error: ${JSON.stringify(err)}`);
+      // DocsDot
    }
-
+    // [EndExclude camera_conversation]
    // Add the XComponent preview stream to the session.
    try {
      videoSession.addOutput(XComponentPreviewOutput);
    } catch (error) {
+      // [StartExclude camera_conversation]
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to add XcomponentPreviewOutput. error: ${JSON.stringify(err)}`);
+      // [EndExclude camera_conversation]
    }
 
    // Add the encoder video stream to the session.
    try {
      videoSession.addOutput(encoderVideoOutput);
    } catch (error) {
+      // [StartExclude camera_conversation]
      let err = error as BusinessError;
      Logger.error(TAG, `Failed to add encoderVideoOutput. error: ${JSON.stringify(err)}`);
+      // [EndExclude camera_conversation]
    }
 
    // Submit configuration information.
    try {
      await videoSession.commitConfig();
    } catch (error) {
+      // [StartExclude camera_conversation]
      let err = error as BusinessError;
      Logger.error(TAG, `videoSession commitConfig error: ${JSON.stringify(err)}`);
+      // [EndExclude camera_conversation]
    }
 
    // Set video stabilization.
@@ -301,18 +325,23 @@ struct Recorder {
    try {
      await videoSession.start();
    } catch (error) {
+      // [StartExclude camera_conversation]
      let err = error as BusinessError;
      Logger.error(TAG, `videoSession start error: ${JSON.stringify(err)}`);
+      // [EndExclude camera_conversation]
    }
 
    // Start the video output stream
    encoderVideoOutput.start((err: BusinessError) => {
+      // [StartExclude camera_conversation]
      if (err) {
        Logger.error(TAG, `Failed to start the encoder video output. error: ${JSON.stringify(err)}`);
        return;
      }
      Logger.info(TAG, 'Callback invoked to indicate the encoder video output start success.');
+      // [EndExclude camera_conversation]
    });
+    // [End camera_conversation]
  }
 
  build() {
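Taken together, the native classes touched by this patch are driven in a fixed order: create and configure the encoder (which fetches the camera input surface and registers the SampleCallback handlers), create and configure the MP4 muxer on an opened file descriptor, then start both so encoded buffers can be written as they arrive. The sketch below is illustrative only and is not part of the change; the header names, the `StartRecording` wrapper, and its ownership model are assumptions, while `SampleInfo`, `CodecUserData`, and the `AVCODEC_SAMPLE_*` codes are the sample's own types as used in the diff.

```cpp
// Hypothetical wiring of the classes patched above (not part of the change).
// Header names are assumed; the caller owns the encoder/muxer lifetimes.
#include "Muxer.h"
#include "VideoEncoder.h"

int32_t StartRecording(VideoEncoder &encoder, Muxer &muxer, SampleInfo &sampleInfo,
                       CodecUserData *codecUserData, int32_t fd)
{
    // Create the encoder; Config() configures it, stores the input surface in
    // sampleInfo.window and registers the SampleCallback handlers.
    if (encoder.Create(sampleInfo.videoCodecMime) != AVCODEC_SAMPLE_ERR_OK ||
        encoder.Config(sampleInfo, codecUserData) != AVCODEC_SAMPLE_ERR_OK) {
        return AVCODEC_SAMPLE_ERR_ERROR;
    }

    // Create the MP4 muxer on an already-opened file descriptor and add the video track.
    if (muxer.Create(fd) != AVCODEC_SAMPLE_ERR_OK || muxer.Config(sampleInfo) != AVCODEC_SAMPLE_ERR_OK) {
        return AVCODEC_SAMPLE_ERR_ERROR;
    }

    // Start the muxer before the encoder so encoded buffers can be written as they arrive.
    if (muxer.Start() != AVCODEC_SAMPLE_ERR_OK || encoder.Start() != AVCODEC_SAMPLE_ERR_OK) {
        return AVCODEC_SAMPLE_ERR_ERROR;
    }

    // From here the OnNewOutputBuffer callback is expected to forward each encoded buffer
    // to Muxer::WriteSample() and return it with VideoEncoder::FreeOutputBuffer();
    // NotifyEndOfStream(), Stop() and Release() tear the pipeline down.
    return AVCODEC_SAMPLE_ERR_OK;
}
```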