/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include <sys/prctl.h>

#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_hidumper.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    DumpFileUtil::CloseDumpFile(&dumpDecBeforeFile_);
    DumpFileUtil::CloseDumpFile(&dumpDecAfterFile_);
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

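// Initialize the decode node: validate the source and target configs, bypass decoding when the codec
// types already match, otherwise start the event handler thread and create the video decoder.
// On success, processedConfig describes the output this node delivers (NO_CODEC, YUVI420).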
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }

    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %{public}d to %{public}d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %{public}d is the same as the source video codec "
            "type %{public}d.", targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    // Width, height and frame rate must all lie within the supported decoder range.
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode event handler, and wait until it is ready.");
    eventThread_ = std::thread([this]() { this->StartEventHandler(); });
    std::unique_lock<std::mutex> lock(eventMutex_);
    eventCon_.wait(lock, [this] {
        return decEventHandler_ != nullptr;
    });
}

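// Runs on the event thread created by InitCodecEvent(): name the thread, create an EventRunner and
// EventHandler, publish decEventHandler_ under eventMutex_, wake the waiting initializer through
// eventCon_, and then block in runner->Run() until ReleaseCodecEvent() stops the runner.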
void DecodeDataProcess::StartEventHandler()
{
    prctl(PR_SET_NAME, DECODE_DATA_EVENT.c_str());
    auto runner = AppExecFwk::EventRunner::Create(false);
    if (runner == nullptr) {
        DHLOGE("Create runner failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(eventMutex_);
        decEventHandler_ = std::make_shared<AppExecFwk::EventHandler>(runner);
    }
    eventCon_.notify_one();
    runner->Run();
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

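// Configure the decoder in sequence: build the metadata format, create the decoder by MIME type,
// register the asynchronous callback, apply the format, and bind the output surface.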
int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. ret %{public}d.", ret);
        return ret;
    }

    videoDecoder_ = MediaAVCodec::VideoDecoderFactory::CreateByMime(processType_);
    CHECK_AND_RETURN_RET_LOG(videoDecoder_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Create video decoder failed.");
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder callback failed. ret %{public}d.", ret);
    ret = videoDecoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder metadata format failed. ret %{public}d.", ret);
    ret = SetDecoderOutputSurface();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Set decoder output surface failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %{public}d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    processedConfig_.SetVideoformat(Videoformat::YUVI420);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

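// The decoder renders decoded frames into a producer surface; this node consumes them through the
// paired consumer surface. DecodeSurfaceListener is expected to route buffer-available notifications
// back to OnSurfaceOutputBufferAvailable(), where the frames are copied out on the decode event thread.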
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    GSError ret = decodeConsumerSurface_->SetDefaultUsage(SurfaceBufferUsage::BUFFER_USAGE_MEM_MMZ_CACHE |
        SurfaceBufferUsage::BUFFER_USAGE_CPU_READ);
    CHECK_AND_LOG(ret != GSERROR_OK || decodeConsumerSurface_ == nullptr, "%{public}s", "Set Usage failed.");

    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    CHECK_AND_RETURN_RET_LOG(surfaceProducer == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get the surface producer of the decode consumer surface failed.");
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    CHECK_AND_RETURN_RET_LOG(err != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR, "%{public}s",
        "Set decoder output surface failed.");
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder prepare failed. ret %{public}d.", ret);
    ret = videoDecoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoDecoder failed.");
    ret = videoDecoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoDecoder release failed. ret %{public}d.", ret);
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    CHECK_AND_LOG(ret != SURFACE_ERROR_OK, "Unregister consumer listener failed. ret %{public}d.", ret);
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    if ((decEventHandler_ != nullptr) && (decEventHandler_->GetEventRunner() != nullptr)) {
        decEventHandler_->GetEventRunner()->Stop();
        eventThread_.join();
    }
    decEventHandler_ = nullptr;
    pipeSrcEventHandler_ = nullptr;
    DHLOGD("Release DecodeNode event handlers end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::queue<std::shared_ptr<Media::AVSharedMemory>>().swap(availableInputBufferQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : DecodeNode end.", nodeRank_);
}

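// Entry point for encoded frames from the previous pipeline node. When no transcoding is needed the
// buffers are forwarded directly via DecodeDone(); otherwise the first buffer is queued and fed to
// the decoder, with a retry posted to pipeSrcEventHandler_ if the first feed attempt fails.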
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_BEFORE_DEC_FILENAME, &dumpDecBeforeFile_);
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_AFTER_DEC_FILENAME, &dumpDecAfterFile_);
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_YUV420_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node has encountered an error or started releasing.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %{public}zu, QueueSize %{public}zu.", inputBuffers[0]->Size(),
        inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        auto sendFunc = [this]() mutable {
            int32_t ret = FeedDecoderInputBuffer();
            DHLOGD("Execute FeedDecoderInputBuffer ret %{public}d.", ret);
        };
        CHECK_AND_RETURN_RET_LOG(pipeSrcEventHandler_ == nullptr, DCAMERA_BAD_VALUE,
            "%{public}s", "pipeSrcEventHandler_ is nullptr.");
        pipeSrcEventHandler_->PostTask(sendFunc);
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::BeforeDecodeDump(uint8_t *buffer, size_t bufSize)
{
#ifdef DUMP_DCAMERA_FILE
    if (buffer == nullptr) {
        DHLOGE("dumpsaving : input param nullptr.");
        return;
    }
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + BEFORE_DECODE) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + BEFORE_DECODE, buffer, bufSize);
    }
#endif
    return;
}

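// Drain inputBuffersQueue_ while the node is running: pair each encoded buffer with an input index
// and shared memory reported by OnInputBufferAvailable(), stamp startDecode, copy the data into the
// codec's shared memory, and queue it to the decoder. Returns early if the decoder has no free input.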
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty() || availableInputBufferQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %{public}zu, availableInputIndexsQueue size %{public}zu, "
                "availableInputBufferQueue size %{public}zu",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size(), availableInputBufferQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            CHECK_AND_RETURN_RET_LOG(
                videoDecoder_ == nullptr, DCAMERA_OK, "The video decoder does not exist before GetInputBuffer.");
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = availableInputBufferQueue_.front();
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%{public}u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            BeforeDecodeDump(buffer->Data(), buffer->Size());
            DumpFileUtil::WriteDumpFile(dumpDecBeforeFile_, static_cast<void *>(buffer->Data()), buffer->Size());
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "memcpy_s buffer failed.");
            DHLOGD("Decoder input buffer size %{public}zu, timeStamp %{public}" PRId64"us.", buffer->Size(), timeStamp);
            MediaAVCodec::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                MediaAVCodec::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %{public}zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

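// waitDecoderOutputCount_ tracks frames that have been queued to the decoder but not yet delivered on
// the output surface. It is increased per queued input and reduced per acquired output; the very first
// output reduces it by FIRST_FRAME_INPUT_NUM, presumably to account for the codec-config input that
// produces no decoded frame.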
void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    availableInputBufferQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %{public}d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

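// Called by DecodeSurfaceListener when the consumer surface has a decoded frame ready. The actual
// buffer acquisition is posted to the decode event thread (decEventHandler_) so the listener
// callback returns quickly.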
void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    auto sendFunc = [this, surface]() mutable {
        GetDecoderOutputBuffer(surface);
        DHLOGD("Execute GetDecoderOutputBuffer.");
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth is too large.");
        // Return the acquired buffer to the surface before bailing out.
        surface->ReleaseBuffer(surfaceBuffer, -1);
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %{public}d, alignedHeight %{public}d, timeStamp %{public}" PRId64" ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

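// Convert the decoder's NV12 surface buffer (stride-aligned to alignedWidth x alignedHeight) into a
// tightly packed YUVI420 buffer of sourceConfig_ width x height, then hand it to the next node.
// The conversion goes through ConverterHandle, which is assumed here to wrap a libyuv-style
// NV12ToI420 routine.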
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    DHLOGD("Convert NV12 to I420, format=%{public}d, width=[%{public}d, %{public}d], height=[%{public}d, %{public}d]",
        sourceConfig_.GetVideoformat(), sourceConfig_.GetWidth(), alignedWidth, sourceConfig_.GetHeight(),
        alignedHeight);
    int srcSizeY = alignedWidth * alignedHeight;
    uint8_t *srcDataY = static_cast<uint8_t *>(surBuf->GetVirAddr());
    uint8_t *srcDataUV = static_cast<uint8_t *>(surBuf->GetVirAddr()) + srcSizeY;

    int dstSizeY = sourceConfig_.GetWidth() * sourceConfig_.GetHeight();
    int dstSizeUV = (static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV) *
                    (static_cast<uint32_t>(sourceConfig_.GetHeight()) >> MEMORY_RATIO_UV);
    std::shared_ptr<DataBuffer> bufferOutput =
        std::make_shared<DataBuffer>(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    uint8_t *dstDataY = bufferOutput->Data();
    uint8_t *dstDataU = bufferOutput->Data() + dstSizeY;
    uint8_t *dstDataV = bufferOutput->Data() + dstSizeY + dstSizeUV;
    auto converter = ConverterHandle::GetInstance().GetHandle();
    CHECK_AND_RETURN_LOG(converter.NV12ToI420 == nullptr, "converter is null.");
    int32_t ret = converter.NV12ToI420(srcDataY, alignedWidth, srcDataUV, alignedWidth,
        dstDataY, sourceConfig_.GetWidth(),
        dstDataU, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        dstDataV, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        processedConfig_.GetWidth(), processedConfig_.GetHeight());
    if (ret != DCAMERA_OK) {
        DHLOGE("Convert NV12 to I420 failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(Videoformat::YUVI420));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());
#ifdef DUMP_DCAMERA_FILE
    std::string fileName = "SourceAfterDecode_width(" + std::to_string(processedConfig_.GetWidth())
        + ")height(" + std::to_string(processedConfig_.GetHeight()) + ").yuv";
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + fileName) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + fileName, bufferOutput->Data(), bufferOutput->Size());
    }
#endif
    DumpFileUtil::WriteDumpFile(dumpDecAfterFile_, static_cast<void *>(bufferOutput->Data()), bufferOutput->Size());
    PostOutputDataBuffers(bufferOutput);
}

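// Sanity-check the surface buffer: the stride-aligned YUV420 image must fit inside the buffer, and
// must be at least as large as the unaligned image that will be copied out of it.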
bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t yuvImageAlignedSize = static_cast<size_t>(alignedWidth * alignedHeight *
                                                     YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                              YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
        DHLOGE("Buffer size error, yuvImageSize %{public}zu, yuvImageAlignedSize %{public}zu, surBufSize %{public}"
            PRIu32, yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (decEventHandler_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("decEventHandler_ or outputBuffer is null.");
        return;
    }
    auto sendFunc = [this, outputBuffer]() mutable {
        std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
        multiDataBuffers.push_back(outputBuffer);
        int32_t ret = DecodeDone(multiDataBuffers);
        DHLOGD("Execute DecodeDone ret %{public}d.", ret);
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
    DHLOGD("Post decoder output buffer to the event handler success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%{public}u].", index);
    availableInputIndexsQueue_.push(index);
    availableInputBufferQueue_.push(buffer);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

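// AVCodec output callback. Records the finishDecode time point for the oldest frame that has not
// finished decoding, then releases the output buffer with render == true so the decoded frame is
// delivered through the output surface rather than through the shared memory argument.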
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const MediaAVCodec::AVCodecBufferInfo& info,
    const MediaAVCodec::AVCodecBufferFlag& flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node has encountered an error or started releasing.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        // Stamp the oldest frame that has not finished decoding yet.
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); ++it) {
            if (it->timePonit.finishDecode != 0) {
                continue;
            }
            it->timePonit.finishDecode = finishDecodeT;
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
            DHLOGE("The video decoder output decoded data to surfacebuffer failed, index : [%{public}u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

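// If the frame at the head of the deque is the codec-config frame (FRAME_HEAD with the CODEC_DATA
// flag), it produces no decoded output; merge its recorded time points into the first real frame so
// the per-frame timing stays aligned with the decoder's actual output. (This reading is inferred
// from the book-keeping below rather than from a documented contract.)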
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != MediaAVCodec::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    // Replace the first real frame's record with the merged one (keeps the codec-config frame's time points).
    frameInfoDeque_.front() = frameInfo;
}
} // namespace DistributedHardware
} // namespace OHOS