/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cmath>
#include <ctime>

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "distributed_hardware_log.h"
#include "encode_data_process.h"
#include "encode_video_callback.h"
#include "graphic_common_c.h"

#ifndef DH_LOG_TAG
#define DH_LOG_TAG "DCDP_NODE_ENCODEC"
#endif

namespace OHOS {
namespace DistributedHardware {
const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
    { WIDTH_320_HEIGHT_240, BITRATE_500000 },
    { WIDTH_480_HEIGHT_360, BITRATE_1110000 },
    { WIDTH_640_HEIGHT_360, BITRATE_1500000 },
    { WIDTH_640_HEIGHT_480, BITRATE_1800000 },
    { WIDTH_720_HEIGHT_540, BITRATE_2100000 },
    { WIDTH_960_HEIGHT_540, BITRATE_2300000 },
    { WIDTH_960_HEIGHT_720, BITRATE_2800000 },
    { WIDTH_1280_HEIGHT_720, BITRATE_3400000 },
    { WIDTH_1440_HEIGHT_1080, BITRATE_5000000 },
    { WIDTH_1920_HEIGHT_1080, BITRATE_6000000 },
};
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

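// Tear-down: if the node is still marked as processing, release the whole process node
// (and the underlying encoder) before destruction.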
EncodeDataProcess::~EncodeDataProcess()
{
    if (isEncoderProcess_.load()) {
        DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

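// Initialize the encode node: validate the source/target configs, and either pass data through
// (when the source and target codec types already match) or create and start a video encoder.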
int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera EncodeNode start.");
    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The EncodeNode cannot convert source VideoCodecType %{public}d to target VideoCodecType %{public}d.",
            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable EncodeNode. The target VideoCodecType %{public}d is the same as the source VideoCodecType "
            "%{public}d.", sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isEncoderProcess_.store(true);
        return DCAMERA_OK;
    }

    int32_t err = InitEncoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video encoder failed.");
        ReleaseProcessNode();
        return err;
    }
    processedConfig = processedConfig_;
    isEncoderProcess_.store(true);
    return DCAMERA_OK;
}

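// Check that width, height and frame rate all fall within the range supported by the encoder.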
bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        sourceConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

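// Configure and start the underlying MediaAVCodec video encoder; report a HiSysEvent when starting fails.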
int32_t EncodeDataProcess::InitEncoder()
{
    DHLOGD("Init video encoder.");
    int32_t ret = ConfigureVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video encoder failed. ret %{public}d.", ret);
        return ret;
    }

    ret = StartVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video encoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_ENCODE_ERROR,
            CreateMsg("start video encoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }

    return DCAMERA_OK;
}

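// Build the metadata/bitrate format, create the encoder by MIME type, register the callback,
// configure the encoder and obtain its input producer surface.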
int32_t EncodeDataProcess::ConfigureVideoEncoder()
{
    int32_t ret = InitEncoderMetadataFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder metadata format failed. ret %{public}d.", ret);
    ret = InitEncoderBitrateFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder bitrate format failed. ret %{public}d.", ret);
    videoEncoder_ = MediaAVCodec::VideoEncoderFactory::CreateByMime(processType_);
    if (videoEncoder_ == nullptr) {
        DHLOGE("Create video encoder failed.");
        return DCAMERA_INIT_ERR;
    }
    encodeVideoCallback_ = std::make_shared<EncodeVideoCallback>(shared_from_this());
    ret = videoEncoder_->SetCallback(encodeVideoCallback_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video encoder callback failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoEncoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video encoder metadata format failed. ret %{public}d.", ret);

    encodeProducerSurface_ = videoEncoder_->CreateInputSurface();
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR,
        "%s", "Get video encoder producer surface failed.");

    return DCAMERA_OK;
}

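// Fill metadataFormat_ from the target codec type (MIME type and profile) and from the source
// pixel format, resolution and frame rate.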
int32_t EncodeDataProcess::InitEncoderMetadataFormat()
{
    processedConfig_ = sourceConfig_;
    switch (targetConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::AVCProfile::AVC_PROFILE_BASELINE);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::HEVCProfile::HEVC_PROFILE_MAIN);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::MPEG4Profile::MPEG4_PROFILE_ADVANCED_CODING);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_MPEG4_ES);
            break;
        default:
            DHLOGE("The current codec type does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::YUVI420));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV21));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::RGBA));
            metadataFormat_.PutLongValue("max_input_size", NORM_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", static_cast<int32_t>(sourceConfig_.GetWidth()));
    metadataFormat_.PutIntValue("height", static_cast<int32_t>(sourceConfig_.GetHeight()));
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);
    return DCAMERA_OK;
}

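// Select an encoder bitrate by matching the source pixel count (width * height) against the
// nearest entry in ENCODER_BITRATE_TABLE; fall back to the encoder default if the table is empty.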
int32_t EncodeDataProcess::InitEncoderBitrateFormat()
{
    DHLOGD("Init video encoder bitrate format.");
    CHECK_AND_RETURN_RET_LOG(!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_)), DCAMERA_BAD_VALUE,
        "%{public}s", "Source config or target config is invalid.");
    metadataFormat_.PutIntValue("i_frame_interval", IDR_FRAME_INTERVAL_MS);
    metadataFormat_.PutIntValue("video_encode_bitrate_mode", MediaAVCodec::VideoEncodeBitrateMode::VBR);

    CHECK_AND_RETURN_RET_LOG(ENCODER_BITRATE_TABLE.empty(), DCAMERA_OK, "%{public}s",
        "ENCODER_BITRATE_TABLE is empty, use the default bitrate of the encoder.");
    int64_t pixelformat = static_cast<int64_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight());
    int32_t matchedBitrate = BITRATE_6000000;
    int64_t minPixelformatDiff = WIDTH_1920_HEIGHT_1080 - pixelformat;
    for (auto it = ENCODER_BITRATE_TABLE.begin(); it != ENCODER_BITRATE_TABLE.end(); it++) {
        int64_t pixelformatDiff = abs(pixelformat - it->first);
        if (pixelformatDiff == 0) {
            matchedBitrate = it->second;
            break;
        }
        if (minPixelformatDiff >= pixelformatDiff) {
            minPixelformatDiff = pixelformatDiff;
            matchedBitrate = it->second;
        }
    }
    DHLOGD("Source config: width : %{public}d, height : %{public}d, matched bitrate %{public}d.",
        sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), matchedBitrate);
    metadataFormat_.PutIntValue("bitrate", matchedBitrate);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StartVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StartVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoEncoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder prepare failed. ret %{public}d.", ret);
    ret = videoEncoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

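// Flush and stop the encoder; both operations are attempted even if one of them fails.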
int32_t EncodeDataProcess::StopVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StopVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoEncoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoEncoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }

    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

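// Stop and release the encoder, then drop the input surface and callback references.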
void EncodeDataProcess::ReleaseVideoEncoder()
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Start release videoEncoder.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before ReleaseVideoEncoder.");
        encodeProducerSurface_ = nullptr;
        encodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoEncoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoEncoder failed.");
    ret = videoEncoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoEncoder release failed. ret %{public}d.", ret);
    encodeProducerSurface_ = nullptr;
    videoEncoder_ = nullptr;
    encodeVideoCallback_ = nullptr;
    DHLOGD("Release videoEncoder success.");
}

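// Release this node, reset its bookkeeping state and propagate the release to the next node
// in the pipeline.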
void EncodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : EncodeNode.", nodeRank_);
    isEncoderProcess_.store(false);
    ReleaseVideoEncoder();

    waitEncoderOutputCount_ = 0;
    lastFeedEncoderInputBufferTimeUs_ = 0;
    inputTimeStampUs_ = 0;
    processType_ = "";

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : EncodeNode end.", nodeRank_);
}

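// Entry point of the node: pass the buffer through when no transcoding is needed, otherwise
// feed it to the encoder input surface.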
int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in EncodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return EncodeDone(inputBuffers);
    }
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before encoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffers[0]->Size() > NORM_YUV420_BUFFER_SIZE) {
        DHLOGE("EncodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    CHECK_AND_RETURN_RET_LOG(!isEncoderProcess_.load(), DCAMERA_DISABLE_PROCESS, "%{public}s",
        "EncodeNode has encountered an error or has started releasing.");
    int32_t err = FeedEncoderInputBuffer(inputBuffers[0]);
    CHECK_AND_RETURN_RET_LOG(err != DCAMERA_OK, err, "%{public}s", "Feed encoder input buffer failed.");
    return DCAMERA_OK;
}

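// Copy the input buffer into a surface buffer requested from the encoder input surface,
// attach a timestamp as extra data and flush it to the encoder.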
int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr<DataBuffer>& inputBuffer)
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Feed encoder input buffer, buffer size %{public}zu.", inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get encoder input producer surface failed.");
    sptr<SurfaceBuffer> surfacebuffer = GetEncoderInputSurfaceBuffer();
    CHECK_AND_RETURN_RET_LOG(surfacebuffer == nullptr, DCAMERA_BAD_OPERATE, "%{public}s",
        "Get encoder input producer surface buffer failed.");
    uint8_t *addr = static_cast<uint8_t *>(surfacebuffer->GetVirAddr());
    if (addr == nullptr) {
        DHLOGE("SurfaceBuffer address is nullptr.");
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    size_t size = static_cast<size_t>(surfacebuffer->GetSize());
    errno_t err = memcpy_s(addr, size, inputBuffer->Data(), inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR,
        "memcpy_s encoder input producer surfacebuffer failed, surBufSize %{public}zu.", size);

    inputTimeStampUs_ = GetEncoderTimeStamp();
    DHLOGD("Encoder input buffer size %{public}zu, timeStamp %{public}lld.", inputBuffer->Size(),
        (long long)inputTimeStampUs_);
    if (surfacebuffer->GetExtraData() == nullptr) {
        DHLOGE("Surface buffer extra data is null.");
        // Return the requested buffer to the producer surface, mirroring the null-address path above.
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    surfacebuffer->GetExtraData()->ExtraSet("timeStamp", inputTimeStampUs_);

    BufferFlushConfig flushConfig = { {0, 0, sourceConfig_.GetWidth(), sourceConfig_.GetHeight()}, 0};
    SurfaceError ret = encodeProducerSurface_->FlushBuffer(surfacebuffer, -1, flushConfig);
    CHECK_AND_RETURN_RET_LOG(ret != SURFACE_ERROR_OK, DCAMERA_BAD_OPERATE, "%s",
        "Flush encoder input producer surface buffer failed.");
    return DCAMERA_OK;
}

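// Request a surface buffer from the encoder input surface, using a pixel format that matches
// the source video format.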
sptr<SurfaceBuffer> EncodeDataProcess::GetEncoderInputSurfaceBuffer()
{
    BufferRequestConfig requestConfig;
    requestConfig.width = sourceConfig_.GetWidth();
    requestConfig.height = sourceConfig_.GetHeight();
    requestConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
    requestConfig.timeout = 0;
    requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT;
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_P;
            break;
        case Videoformat::NV12:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_SP;
            break;
        case Videoformat::NV21:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCRCB_420_SP;
            break;
        case Videoformat::RGBA_8888:
            requestConfig.format = PixelFormat::PIXEL_FMT_RGBA_8888;
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return nullptr;
    }
    sptr<SurfaceBuffer> surfacebuffer = nullptr;
    int32_t flushFence = -1;
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Encode producer surface is null.");
        return nullptr;
    }
    GSError err = encodeProducerSurface_->RequestBuffer(surfacebuffer, flushFence, requestConfig);
    if (err != GSERROR_OK || surfacebuffer == nullptr) {
        DHLOGE("Request encoder input producer surface buffer failed, error code: %{public}d.", err);
    }
    return surfacebuffer;
}

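// Generate the timestamp attached to the next input buffer and remember the previous one.
// Note: GetNowTimeStampUs() returns microseconds, and the result is scaled by 1000, so the
// attached timestamp has nanosecond resolution; GetEncoderOutputBuffer() interprets the
// presentation timestamp the same way when it computes the encode duration.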
int64_t EncodeDataProcess::GetEncoderTimeStamp()
{
    if (inputTimeStampUs_ != 0) {
        lastFeedEncoderInputBufferTimeUs_ = inputTimeStampUs_;
    }
    const int64_t nsPerUs = 1000L;
    int64_t nowTimeUs = GetNowTimeStampUs() * nsPerUs;
    return nowTimeUs;
}

void EncodeDataProcess::IncreaseWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (lastFeedEncoderInputBufferTimeUs_ == 0) {
        waitEncoderOutputCount_ += FIRST_FRAME_OUTPUT_NUM;
    } else {
        waitEncoderOutputCount_++;
    }
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

void EncodeDataProcess::ReduceWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitEncoderOutputCount_ <= 0) {
        DHLOGE("The waitEncoderOutputCount_ = %{public}d.", waitEncoderOutputCount_);
    }
    waitEncoderOutputCount_--;
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

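// Copy one encoded frame out of the shared memory into a DataBuffer, attach timing metadata
// (start/finish encode time, timestamp, frame type, index) and hand it to EncodeDone.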
int32_t EncodeDataProcess::GetEncoderOutputBuffer(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory>& buffer)
{
    DHLOGD("Get encoder output buffer.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before output encoded data.");
        return DCAMERA_BAD_VALUE;
    }
    if (buffer == nullptr) {
        DHLOGE("Failed to get the output shared memory, index : %{public}u", index);
        return DCAMERA_BAD_OPERATE;
    }

    CHECK_AND_RETURN_RET_LOG(info.size <= 0 || info.size > DATABUFF_MAX_SIZE, DCAMERA_BAD_VALUE,
        "AVCodecBufferInfo error, buffer size : %{public}d", info.size);
    size_t outputMemoDataSize = static_cast<size_t>(info.size);
    DHLOGD("Encoder output buffer size : %{public}zu", outputMemoDataSize);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(outputMemoDataSize);
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(),
        buffer->GetBase(), outputMemoDataSize);
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "%{public}s", "memcpy_s buffer failed.");
    int64_t timeStamp = info.presentationTimeUs;
    struct timespec time = {0, 0};
    clock_gettime(CLOCK_MONOTONIC, &time);
    int64_t timeNs = static_cast<int64_t>(time.tv_sec) * S2NS + static_cast<int64_t>(time.tv_nsec);
    int64_t encodeT = (timeNs - timeStamp) / static_cast<int64_t>(US2NS);
    int64_t finishEncodeT = GetNowTimeStampUs();
    int64_t startEncodeT = finishEncodeT - encodeT;
    bufferOutput->SetInt64(START_ENCODE_TIME_US, startEncodeT);
    bufferOutput->SetInt64(FINISH_ENCODE_TIME_US, finishEncodeT);
    bufferOutput->SetInt64(TIME_STAMP_US, timeStamp);
    bufferOutput->SetInt32(FRAME_TYPE, flag);
    bufferOutput->SetInt32(INDEX, index_);
    index_++;
    std::vector<std::shared_ptr<DataBuffer>> nextInputBuffers;
    nextInputBuffers.push_back(bufferOutput);
    return EncodeDone(nextInputBuffers);
}

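// Forward the output buffers to the next node if one exists; otherwise deliver the first buffer
// to the pipeline sink callback.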
int32_t EncodeDataProcess::EncodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Encoder done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the encoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        CHECK_AND_LOG(err != DCAMERA_OK, "%{public}s", "A node after the encoder failed to process the data.");
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSink->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void EncodeDataProcess::OnError()
{
    DHLOGD("EncodeDataProcess : OnError.");
    isEncoderProcess_.store(false);
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Flush();
        videoEncoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    CHECK_AND_RETURN_LOG(targetPipelineSink == nullptr, "%{public}s", "callbackPipelineSink_ is nullptr.");
    targetPipelineSink->OnError(DataProcessErrorType::ERROR_PIPELINE_ENCODER);
}

void EncodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("The available input buffer index : %{public}u. No operation when an input surface is used.", index);
}

void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video encoder output format is null.");
        return;
    }
    encodeOutputFormat_ = format;
}

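// Encoder output callback: copy the encoded data out, then release the codec output buffer.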
void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode has encountered an error or has started releasing.");
        return;
    }
    DHLOGD("Video encode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    int32_t err = GetEncoderOutputBuffer(index, info, flag, buffer);
    if (err != DCAMERA_OK) {
        DHLOGE("Get encoder output buffer failed.");
        return;
    }
    CHECK_AND_RETURN_LOG(videoEncoder_ == nullptr, "%{public}s",
        "The video encoder does not exist before release output buffer index.");
    int32_t errRelease = videoEncoder_->ReleaseOutputBuffer(index);
    CHECK_AND_LOG(errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "The video encoder release output buffer failed, index : [%{public}u].", index);
}

VideoConfigParams EncodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams EncodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

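// Expose the encoder input surface to the caller through the property carrier; the
// BUFFER_USAGE_VIDEO_ENCODER usage bit is cleared before the surface is handed out.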
int32_t EncodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    if (propertyName != surfaceStr_) {
        return DCAMERA_OK;
    }
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_BAD_VALUE, "%{public}s",
        "EncodeDataProcess::GetProperty: encode dataProcess get property fail, encode surface is nullptr.");
    encodeProducerSurface_->SetDefaultUsage(encodeProducerSurface_->GetDefaultUsage() & (~BUFFER_USAGE_VIDEO_ENCODER));
    return propertyCarrier.CarrySurfaceProperty(encodeProducerSurface_);
}
} // namespace DistributedHardware
} // namespace OHOS