/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_hidumper.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"
#include <sys/prctl.h>

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    DumpFileUtil::CloseDumpFile(&dumpDecBeforeFile_);
    DumpFileUtil::CloseDumpFile(&dumpDecAfterFile_);
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

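// InitNode validates the source/target configs, bypasses decoding when the codec types already match,
// and otherwise spins up the event handler thread and the AVCodec video decoder.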
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }

    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %{public}d to %{public}d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %{public}d is the same as the source video codec "
            "type %{public}d.", targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

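// A config is considered decodable only when its width, height, and frame rate all fall within the
// supported [MIN, MAX] ranges defined for the decoder node.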
bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

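// InitCodecEvent starts a dedicated thread that owns the EventRunner, then blocks until
// StartEventHandler() has published decEventHandler_, so later PostTask calls never race the handler creation.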
void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode event handler and wait until it is ready.");
    eventThread_ = std::thread([this]() { this->StartEventHandler(); });
    std::unique_lock<std::mutex> lock(eventMutex_);
    eventCon_.wait(lock, [this] {
        return decEventHandler_ != nullptr;
    });
}

void DecodeDataProcess::StartEventHandler()
{
    prctl(PR_SET_NAME, DECODE_DATA_EVENT.c_str());
    auto runner = AppExecFwk::EventRunner::Create(false);
    if (runner == nullptr) {
        DHLOGE("Create runner failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(eventMutex_);
        decEventHandler_ = std::make_shared<AppExecFwk::EventHandler>(runner);
    }
    eventCon_.notify_one();
    runner->Run();
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. ret %{public}d.", ret);
        return ret;
    }

    videoDecoder_ = MediaAVCodec::VideoDecoderFactory::CreateByMime(processType_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("Create video decoder failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video decoder callback failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoDecoder_->Configure(metadataFormat_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video decoder metadata format failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = SetDecoderOutputSurface();
    if (ret != DCAMERA_OK) {
        DHLOGE("Set decoder output surface failed. ret %{public}d.", ret);
        return ret;
    }

    return DCAMERA_OK;
}

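// InitDecoderMetadataFormat maps the source codec type to an AVCodec MIME string and fills
// metadataFormat_ with the pixel format, maximum input size, resolution, and frame rate that
// Configure() expects. The processed config keeps the source geometry but drops the codec (NO_CODEC).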
int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %{public}d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    DHLOGI("Init video decoder metadata format. videoformat: %{public}d", processedConfig_.GetVideoformat());
    switch (processedConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::YUVI420));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV21));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::RGBA));
            metadataFormat_.PutIntValue("max_input_size", MAX_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

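// The decoder renders into a consumer/producer surface pair: the consumer side is held locally so the
// DecodeSurfaceListener can pull decoded frames, while the producer side is handed to the decoder as its
// output surface.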
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    GSError ret = decodeConsumerSurface_->SetDefaultUsage(SurfaceBufferUsage::BUFFER_USAGE_MEM_MMZ_CACHE |
        SurfaceBufferUsage::BUFFER_USAGE_CPU_READ);
    if (ret != GSERROR_OK) {
        DHLOGE("Set the decode consumer surface default usage failed.");
    }

    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    if (surfaceProducer == nullptr) {
        DHLOGE("Get the surface producer of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    if (err != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set decoder output surface failed.");
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Video decoder prepare failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoDecoder_->Start();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Video decoder start failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoDecoder failed.");
    }
    ret = videoDecoder_->Release();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder release failed. ret %{public}d.", ret);
    }
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Unregister consumer listener failed. ret %{public}d.", ret);
    }
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    if ((decEventHandler_ != nullptr) && (decEventHandler_->GetEventRunner() != nullptr)) {
        decEventHandler_->GetEventRunner()->Stop();
        eventThread_.join();
    }
    decEventHandler_ = nullptr;
    pipeSrcEventHandler_ = nullptr;
    DHLOGD("Release DecodeNode event handlers end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::queue<std::shared_ptr<Media::AVSharedMemory>>().swap(availableInputBufferQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : DecodeNode end.", nodeRank_);
}

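// ProcessData either forwards the buffer unchanged (pass-through when no transcoding is needed) or queues
// it for the decoder; if the immediate feed attempt fails, a retry is posted to the pipeline source handler.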
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty or invalid.");
        return DCAMERA_BAD_VALUE;
    }
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_BEFORE_DEC_FILENAME, &dumpDecBeforeFile_);
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_AFTER_DEC_FILENAME, &dumpDecAfterFile_);
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("The decoder node has encountered an error or is being released.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %{public}zu, QueueSize %{public}zu.", inputBuffers[0]->Size(),
        inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        auto sendFunc = [this]() mutable {
            int32_t ret = FeedDecoderInputBuffer();
            DHLOGD("Execute FeedDecoderInputBuffer ret %{public}d.", ret);
        };
        CHECK_AND_RETURN_RET_LOG(pipeSrcEventHandler_ == nullptr, DCAMERA_BAD_VALUE,
            "%{public}s", "pipeSrcEventHandler_ is nullptr.");
        pipeSrcEventHandler_->PostTask(sendFunc);
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::BeforeDecodeDump(uint8_t *buffer, size_t bufSize)
{
#ifdef DUMP_DCAMERA_FILE
    if (buffer == nullptr) {
        DHLOGE("dumpsaving : input param nullptr.");
        return;
    }
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + BEFORE_DECODE) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + BEFORE_DECODE, buffer, bufSize);
    }
#endif
    return;
}

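// FeedDecoderInputBuffer drains inputBuffersQueue_: each encoded frame is copied into the next available
// AVCodec shared-memory input buffer, queued to the decoder with its pts, and its frame info is recorded
// in frameInfoDeque_ so the decode latency can be stamped when the output arrives.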
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty() || availableInputBufferQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %{public}zu, availableInputIndexsQueue size %{public}zu, "
                "availableInputBufferQueue size %{public}zu",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size(), availableInputBufferQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            CHECK_AND_RETURN_RET_LOG(
                videoDecoder_ == nullptr, DCAMERA_OK, "The video decoder does not exist before GetInputBuffer.");
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = availableInputBufferQueue_.front();
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%{public}u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            BeforeDecodeDump(buffer->Data(), buffer->Size());
            DumpFileUtil::WriteDumpFile(dumpDecBeforeFile_, static_cast<void *>(buffer->Data()), buffer->Size());
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "memcpy_s buffer failed.");
            DHLOGD("Decoder input buffer size %{public}zu, timeStamp %{public}" PRId64"us.", buffer->Size(), timeStamp);
            MediaAVCodec::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                MediaAVCodec::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Pop inputBuffer success. inputBuffersQueue size is %{public}zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    availableInputBufferQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

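// ReduceWaitDecodeCnt decrements the pending-output counter. On the very first output
// (outputTimeStampUs_ still 0) it subtracts FIRST_FRAME_INPUT_NUM instead of 1, which presumably
// accounts for the extra codec-config input that never produces a displayable frame.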
void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %{public}d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    auto sendFunc = [this, surface]() mutable {
        GetDecoderOutputBuffer(surface);
        DHLOGD("Execute GetDecoderOutputBuffer.");
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth is too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %{public}d, alignedHeight %{public}d, timeStamp %{public}" PRId64" ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

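// CopyDecodedImage copies the decoded frame out of the surface buffer into a plain DataBuffer.
// The copy size is derived from the source resolution: width * height * RGB32_MEMORY_COEFFICIENT for
// RGBA_8888, otherwise width * height * YUV_BYTES_PER_PIXEL / Y2UV_RATIO (the usual w * h * 3 / 2 for
// 4:2:0 data, assuming those constants are 3 and 2). Note that the copy starts at the buffer's virtual
// address and does not compensate for stride padding beyond the checks in IsCorrectSurfaceBuffer.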
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    size_t imageSize = 0;
    if (processedConfig_.GetVideoformat() == Videoformat::RGBA_8888) {
        imageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
            RGB32_MEMORY_COEFFICIENT);
    } else {
        imageSize = static_cast<size_t>(
            sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    }
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(imageSize);
    uint8_t *addr = static_cast<uint8_t *>(surBuf->GetVirAddr());
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, imageSize);
    if (err != EOK) {
        DHLOGE("memcpy_s surface buffer failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());
#ifdef DUMP_DCAMERA_FILE
    std::string fileName = "SourceAfterDecode_width(" + std::to_string(processedConfig_.GetWidth())
        + ")height(" + std::to_string(processedConfig_.GetHeight()) + ").yuv";
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + fileName) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + fileName, bufferOutput->Data(), bufferOutput->Size());
    }
#endif
    DumpFileUtil::WriteDumpFile(dumpDecAfterFile_, static_cast<void *>(bufferOutput->Data()), bufferOutput->Size());
    PostOutputDataBuffers(bufferOutput);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    if (processedConfig_.GetVideoformat() == Videoformat::RGBA_8888) {
        size_t rgbImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
            RGB32_MEMORY_COEFFICIENT);
        size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
        if (rgbImageSize > surfaceBufSize) {
            DHLOGE("Buffer size error, rgbImageSize %{public}zu, surBufSize %{public}" PRIu32, rgbImageSize,
                surBuf->GetSize());
            return false;
        }
    } else {
        size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
        size_t yuvImageAlignedSize = static_cast<size_t>(
            alignedWidth * alignedHeight * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
        size_t yuvImageSize = static_cast<size_t>(
            sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
        if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
            DHLOGE("Buffer size error, yuvImageSize %{public}zu, yuvImageAlignedSize %{public}zu, surBufSize "
                "%{public}" PRIu32, yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
            return false;
        }
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (decEventHandler_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("decEventHandler_ or outputBuffer is null.");
        return;
    }
    auto sendFunc = [this, outputBuffer]() mutable {
        std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
        multiDataBuffers.push_back(outputBuffer);
        int32_t ret = DecodeDone(multiDataBuffers);
        DHLOGD("Execute DecodeDone ret %{public}d.", ret);
    };
    decEventHandler_->PostTask(sendFunc);
    DHLOGD("Post the decoded output buffer to the decode event handler success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%{public}u].", index);
    availableInputIndexsQueue_.push(index);
    availableInputBufferQueue_.push(buffer);
}

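// OnOutputFormatChanged caches the most recent output format reported by the decoder; an empty incoming
// format is ignored (assumption: an empty format map signals an invalid notification).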
void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

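// OnOutputBufferAvailable stamps the decode-finish time on the oldest frame still waiting in
// frameInfoDeque_, then releases the codec output buffer with render == true so the decoded frame is
// pushed to the output surface for the consumer listener to pick up.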
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const MediaAVCodec::AVCodecBufferInfo& info,
    const MediaAVCodec::AVCodecBufferFlag& flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("The decoder node has encountered an error or is being released.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, flag "
        "%{public}" PRIu32, info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
            DHLOGE("The video decoder output decoded data to surfacebuffer failed, index : [%{public}u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

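// AlignFirstFrameTime merges the time points recorded for the codec-config entry (FRAME_HEAD with the
// codec-data flag) into the record of the first real frame, apparently so that the first frame's latency
// statistics are not split across two deque entries.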
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != MediaAVCodec::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS