/*
 * Copyright (c) 2024-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "avcodec_task_manager.h"

#include <algorithm>
#include <chrono>
#include <cinttypes>
#include <cstdint>
#include <fcntl.h>
#include <memory>
#include <mutex>
#include <unistd.h>
#include <utility>
#include "datetime_ex.h"
#include "camera_util.h"
#include "audio_capturer_session.h"
#include "audio_record.h"
#include "audio_video_muxer.h"
#include "camera_log.h"
#include "external_window.h"
#include "frame_record.h"
#include "native_avbuffer.h"
#include "native_avbuffer_info.h"
#include "native_buffer_inner.h"
#include "sample_info.h"
#include "native_mfmagic.h"

namespace {
using namespace std::string_literals;
using namespace std::chrono_literals;
} // namespace
namespace OHOS {
namespace CameraStandard {

AvcodecTaskManager::~AvcodecTaskManager()
{
    CAMERA_SYNC_TRACE;
    Release();
}

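// Constructs the task manager for the given video codec type. When MOVING_PHOTO_ADD_AUDIO is
// enabled, the audio capturer session and an audio encoder are also set up so the muxed moving
// photo can carry an audio track.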
AvcodecTaskManager::AvcodecTaskManager(sptr<AudioCapturerSession> audioCaptureSession,
    VideoCodecType type) : videoCodecType_(type)
{
    CAMERA_SYNC_TRACE;
#ifdef MOVING_PHOTO_ADD_AUDIO
    audioCapturerSession_ = audioCaptureSession;
    audioEncoder_ = make_unique<AudioEncoder>();
#endif
    // Create video encoder
    videoEncoder_ = make_unique<VideoEncoder>(type);
}

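// Lazily creates the general-purpose task manager used for muxing work and returns the shared
// pointer member by reference. It stays null once ClearTaskResource() has marked this object inactive.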
shared_ptr<TaskManager>& AvcodecTaskManager::GetTaskManager()
{
    lock_guard<mutex> lock(taskManagerMutex_);
    if (taskManager_ == nullptr && isActive_.load()) {
        taskManager_ = make_unique<TaskManager>("AvcodecTaskManager", DEFAULT_THREAD_NUMBER, false);
    }
    return taskManager_;
}

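// Lazily creates the dedicated task manager used for per-frame video encoding, created with
// DEFAULT_ENCODER_THREAD_NUMBER threads and subject to the same isActive_ gating as GetTaskManager().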
shared_ptr<TaskManager>& AvcodecTaskManager::GetEncoderManager()
{
    lock_guard<mutex> lock(encoderManagerMutex_);
    if (videoEncoderManager_ == nullptr && isActive_.load()) {
        videoEncoderManager_ = make_unique<TaskManager>("VideoTaskManager", DEFAULT_ENCODER_THREAD_NUMBER, true);
    }
    return videoEncoderManager_;
}

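// Submits an asynchronous task that encodes one frame's surface buffer. On success the surface
// buffer is released back to the encoder; the optional cache callback is invoked with the result.
// Usage sketch (illustrative only; the callback body below is an assumption, not part of this file):
//   taskManager->EncodeVideoBuffer(frameRecord, [](sptr<FrameRecord> frame, bool success) {
//       MEDIA_INFO_LOG("frame %{public}s encode result %{public}d", frame->GetFrameId().c_str(), success);
//   });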
void AvcodecTaskManager::EncodeVideoBuffer(sptr<FrameRecord> frameRecord, CacheCbFunc cacheCallback)
{
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto encodeManager = GetEncoderManager();
    if (!encodeManager) {
        return;
    }
    encodeManager->SubmitTask([thisPtr, frameRecord, cacheCallback]() {
        CAMERA_SYNC_TRACE;
        bool isEncodeSuccess = false;
        if (!thisPtr->videoEncoder_ || !frameRecord) {
            return;
        }
        isEncodeSuccess = thisPtr->videoEncoder_->EncodeSurfaceBuffer(frameRecord);
        if (isEncodeSuccess) {
            thisPtr->videoEncoder_->ReleaseSurfaceBuffer(frameRecord);
        }
        frameRecord->SetEncodedResult(isEncodeSuccess);
        frameRecord->SetFinishStatus();
        if (isEncodeSuccess) {
            MEDIA_INFO_LOG("encode image success %{public}s, refCount: %{public}d", frameRecord->GetFrameId().c_str(),
                frameRecord->GetSptrRefCount());
        } else {
            MEDIA_ERR_LOG("encode image fail %{public}s", frameRecord->GetFrameId().c_str());
        }
        if (cacheCallback) {
            cacheCallback(frameRecord, isEncodeSuccess);
        }
    });
}

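// Forwards an arbitrary task to the general task manager, dropping it silently if the manager
// has already been torn down.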
void AvcodecTaskManager::SubmitTask(function<void()> task)
{
    auto taskManager = GetTaskManager();
    if (taskManager) {
        taskManager->SubmitTask(task);
    }
}

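// Records the shutter timestamp and photo asset proxy for a capture id. CreateAVMuxer() waits on
// cvEmpty_ for this entry before it can mux the corresponding moving photo video.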
void AvcodecTaskManager::SetVideoFd(int64_t timestamp, PhotoAssetIntf* photoAssetProxy, int32_t captureId)
{
    lock_guard<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("Set timestamp: %{public}" PRId64 ", captureId: %{public}d", timestamp, captureId);
    videoFdMap_.insert(std::make_pair(captureId, std::make_pair(timestamp, photoAssetProxy)));
    MEDIA_DEBUG_LOG("video map size:%{public}zu", videoFdMap_.size());
    cvEmpty_.notify_all();
}

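// Creates and starts an AudioVideoMuxer for one capture: waits (up to GET_FD_EXPIREATION_TIME ms)
// for SetVideoFd() to publish the capture's timestamp and asset proxy, selects the frames to mux,
// and adds the video, audio (when MOVING_PHOTO_ADD_AUDIO is set) and timed-metadata tracks.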
sptr<AudioVideoMuxer> AvcodecTaskManager::CreateAVMuxer(vector<sptr<FrameRecord>> frameRecords, int32_t captureRotation,
    vector<sptr<FrameRecord>> &choosedBuffer, int32_t captureId)
{
    CAMERA_SYNC_TRACE;
    unique_lock<mutex> lock(videoFdMutex_);
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    if (videoFdMap_.empty()) {
        bool waitResult = cvEmpty_.wait_for(lock, std::chrono::milliseconds(GET_FD_EXPIREATION_TIME),
            [thisPtr] { return !thisPtr->videoFdMap_.empty(); });
        CHECK_ERROR_RETURN_RET(!waitResult || videoFdMap_.empty(), nullptr);
    }
    sptr<AudioVideoMuxer> muxer = new AudioVideoMuxer();
    OH_AVOutputFormat format = AV_OUTPUT_FORMAT_MPEG_4;
    int64_t timestamp = videoFdMap_[captureId].first;
    auto photoAssetProxy = videoFdMap_[captureId].second;
    videoFdMap_.erase(captureId);
    ChooseVideoBuffer(frameRecords, choosedBuffer, timestamp, captureId);
    muxer->Create(format, photoAssetProxy);
    muxer->SetRotation(captureRotation);
    if (!choosedBuffer.empty()) {
        muxer->SetCoverTime(NanosecToMillisec(timestamp - choosedBuffer.front()->GetTimeStamp()));
    }
    auto formatVideo = make_shared<Format>();
    MEDIA_INFO_LOG("CreateAVMuxer videoCodecType_ = %{public}d", videoCodecType_);
    formatVideo->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, videoCodecType_
        == VIDEO_ENCODE_TYPE_HEVC ? OH_AVCODEC_MIMETYPE_VIDEO_HEVC : OH_AVCODEC_MIMETYPE_VIDEO_AVC);
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_WIDTH, frameRecords[0]->GetFrameSize()->width);
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, frameRecords[0]->GetFrameSize()->height);
    formatVideo->PutDoubleValue(MediaDescriptionKey::MD_KEY_FRAME_RATE, VIDEO_FRAME_RATE);
    int videoTrackId = -1;
    muxer->AddTrack(videoTrackId, formatVideo, VIDEO_TRACK);
    int audioTrackId = -1;
#ifdef MOVING_PHOTO_ADD_AUDIO
    auto formatAudio = make_shared<Format>();
    formatAudio->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, OH_AVCODEC_MIMETYPE_AUDIO_AAC);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_SAMPLE_RATE, SAMPLERATE_32000);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_CHANNEL_COUNT, DEFAULT_CHANNEL_COUNT);
    muxer->AddTrack(audioTrackId, formatAudio, AUDIO_TRACK);
#endif
    int metaTrackId = -1;
    auto formatMeta = make_shared<Format>();
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, TIMED_METADATA_TRACK_MIMETYPE);
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_KEY, TIMED_METADATA_KEY);
    formatMeta->PutIntValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_SRC_TRACK_ID, videoTrackId);
    muxer->AddTrack(metaTrackId, formatMeta, META_TRACK);
    MEDIA_INFO_LOG("CreateMuxer vId:%{public}d,aid:%{public}d,mid:%{public}d", videoTrackId, audioTrackId, metaTrackId);
    muxer->SetTimedMetadata();
    muxer->Start();
    return muxer;
}

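// Stops and releases the muxer, then notifies the photo asset proxy that the video has been saved
// and frees the proxy that was handed over in SetVideoFd().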
void AvcodecTaskManager::FinishMuxer(sptr<AudioVideoMuxer> muxer)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("doMuxer video is finished");
    if (muxer) {
        muxer->Stop();
        muxer->Release();
        PhotoAssetIntf* proxy = muxer->GetPhotoAssetProxy();
        MEDIA_INFO_LOG("PhotoAssetProxy notify enter");
        if (proxy) {
            proxy->NotifyVideoSaveFinished();
            delete proxy;
        }
    }
}

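// Muxes the encoded frames of one moving photo capture on the task manager thread: writes the
// chosen video samples (pts rebased to the first chosen frame, in microseconds) plus their
// timed-metadata buffers, then appends the audio track and finalizes the muxer.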
void AvcodecTaskManager::DoMuxerVideo(vector<sptr<FrameRecord>> frameRecords, uint64_t taskName,
    int32_t captureRotation, int32_t captureId) __attribute__((no_sanitize("cfi")))
{
    CAMERA_SYNC_TRACE;
    CHECK_ERROR_RETURN_LOG(frameRecords.empty(), "DoMuxerVideo error of empty encoded frame");
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto taskManager = GetTaskManager();
    CHECK_ERROR_RETURN_LOG(taskManager == nullptr, "GetTaskManager is null");
    taskManager->SubmitTask([thisPtr, frameRecords, captureRotation, captureId]() {
        CAMERA_SYNC_TRACE;
        MEDIA_INFO_LOG("CreateAVMuxer with %{public}zu", frameRecords.size());
        vector<sptr<FrameRecord>> choosedBuffer;
        sptr<AudioVideoMuxer> muxer = thisPtr->CreateAVMuxer(frameRecords, captureRotation, choosedBuffer, captureId);
        CHECK_ERROR_RETURN_LOG(muxer == nullptr, "CreateAVMuxer failed");
        CHECK_ERROR_RETURN_LOG(choosedBuffer.empty(), "choosed empty buffer!");
        int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
        for (size_t index = 0; index < choosedBuffer.size(); index++) {
            OH_AVBuffer* buffer = choosedBuffer[index]->encodedBuffer;
            {
                std::lock_guard<std::mutex> lock(choosedBuffer[index]->bufferMutex_);
                OH_AVCodecBufferAttr attr = { 0, 0, 0, AVCODEC_BUFFER_FLAGS_NONE };
                CHECK_AND_CONTINUE_LOG(buffer != nullptr, "video encodedBuffer is null");
                OH_AVBuffer_GetBufferAttr(buffer, &attr);
                attr.pts = NanosecToMicrosec(choosedBuffer[index]->GetTimeStamp() - videoStartTime);
                MEDIA_DEBUG_LOG("choosed buffer pts: %{public}" PRId64, attr.pts);
                OH_AVBuffer_SetBufferAttr(buffer, &attr);
                muxer->WriteSampleBuffer(buffer->buffer_, VIDEO_TRACK);
            }
            sptr<SurfaceBuffer> metaSurfaceBuffer = choosedBuffer[index]->GetMetaBuffer();
            if (metaSurfaceBuffer) {
                shared_ptr<AVBuffer> metaAvBuffer = AVBuffer::CreateAVBuffer(metaSurfaceBuffer);
                metaAvBuffer->pts_ = buffer->buffer_->pts_;
                MEDIA_DEBUG_LOG("metaAvBuffer pts_ %{public}llu, avBufferSize: %{public}d",
                    (long long unsigned)(metaAvBuffer->pts_), metaAvBuffer->memory_->GetSize());
                muxer->WriteSampleBuffer(metaAvBuffer, META_TRACK);
            } else {
                MEDIA_ERR_LOG("metaSurfaceBuffer is nullptr");
            }
            choosedBuffer[index]->UnLockMetaBuffer();
        }
#ifdef MOVING_PHOTO_ADD_AUDIO
        // CollectAudioBuffer
        vector<sptr<AudioRecord>> audioRecords;
        vector<sptr<AudioRecord>> processedAudioRecords;
        thisPtr->PrepareAudioBuffer(choosedBuffer, audioRecords, processedAudioRecords);
        thisPtr->CollectAudioBuffer(processedAudioRecords, muxer);
#endif
        thisPtr->FinishMuxer(muxer);
    });
}

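// Finds the index of the IDR frame that should start the clip. The search window begins
// preBufferDuration_ before the shutter time, or at the deblur start time reported for this
// capture id when that falls inside the window; falls back to index 0 when no IDR frame qualifies.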
size_t AvcodecTaskManager::FindIdrFrameIndex(vector<sptr<FrameRecord>> frameRecords, int64_t shutterTime,
    int32_t captureId)
{
    bool isDeblurStartTime = false;
    std::unique_lock<mutex> startTimeLock(startTimeMutex_);
    int64_t clearVideoStartTime = shutterTime - preBufferDuration_;
    if (mPStartTimeMap_.count(captureId) && mPStartTimeMap_[captureId] <= shutterTime
        && mPStartTimeMap_[captureId] > clearVideoStartTime) {
        MEDIA_INFO_LOG("set deblur start time is %{public}" PRIu64, mPStartTimeMap_[captureId]);
        clearVideoStartTime = mPStartTimeMap_[captureId];
        isDeblurStartTime = true;
    }
    mPStartTimeMap_.erase(captureId);
    startTimeLock.unlock();
    MEDIA_INFO_LOG("FindIdrFrameIndex captureId : %{public}d, clearVideoStartTime : %{public}" PRId64,
        captureId, clearVideoStartTime);
    size_t idrIndex = frameRecords.size();
    if (isDeblurStartTime) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() <= clearVideoStartTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex before start time");
                idrIndex = index;
            }
        }
    }
    if (idrIndex == frameRecords.size()) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() >= clearVideoStartTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex after start time");
                idrIndex = index;
                break;
            }
            idrIndex = 0;
        }
    }
    return idrIndex;
}

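// Fallback selection used when no suitable IDR-aligned range was found: copies everything from
// the first IDR frame onwards into choosedBuffer, ignoring the deblur time window.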
void AvcodecTaskManager::IgnoreDeblur(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime)
{
    MEDIA_INFO_LOG("IgnoreDeblur enter");
    choosedBuffer.clear();
    if (!frameRecords.empty()) {
        auto it = find_if(frameRecords.begin(), frameRecords.end(),
            [](const sptr<FrameRecord>& frame) { return frame->IsIDRFrame(); });
        while (it != frameRecords.end()) {
            choosedBuffer.emplace_back(*it);
            ++it;
        }
    }
}

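// Selects the frames to mux: starting from the IDR frame returned by FindIdrFrameIndex(), keeps
// frames up to the deblur end time (or shutterTime + postBufferDuration_), capped at
// MAX_FRAME_COUNT; falls back to IgnoreDeblur() if the selection is empty or not IDR-led.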
void AvcodecTaskManager::ChooseVideoBuffer(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime, int32_t captureId)
{
    choosedBuffer.clear();
    size_t idrIndex = FindIdrFrameIndex(frameRecords, shutterTime, captureId);
    std::unique_lock<mutex> endTimeLock(endTimeMutex_);
    int64_t clearVideoEndTime = shutterTime + postBufferDuration_;
    if (mPEndTimeMap_.count(captureId) && mPEndTimeMap_[captureId] >= shutterTime
        && mPEndTimeMap_[captureId] < clearVideoEndTime) {
        MEDIA_INFO_LOG("set deblur end time is %{public}" PRIu64, mPEndTimeMap_[captureId]);
        clearVideoEndTime = mPEndTimeMap_[captureId];
    }
    mPEndTimeMap_.erase(captureId);
    endTimeLock.unlock();
    MEDIA_INFO_LOG("ChooseVideoBuffer captureId : %{public}d, shutterTime : %{public}" PRId64 ", "
        "clearVideoEndTime : %{public}" PRId64, captureId, shutterTime, clearVideoEndTime);
    size_t frameCount = 0;
    for (size_t index = idrIndex; index < frameRecords.size(); ++index) {
        auto frame = frameRecords[index];
        int64_t timestamp = frame->GetTimeStamp();
        if (timestamp <= clearVideoEndTime && frameCount < MAX_FRAME_COUNT) {
            choosedBuffer.push_back(frame);
            ++frameCount;
        }
    }
    if (choosedBuffer.empty() || !frameRecords[idrIndex]->IsIDRFrame()) {
        IgnoreDeblur(frameRecords, choosedBuffer, shutterTime);
    }
    MEDIA_INFO_LOG("ChooseVideoBuffer with size %{public}zu", choosedBuffer.size());
}

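// Collects the raw audio records covering the chosen video range (timestamps converted from
// nanoseconds to milliseconds) and runs them through the deferred audio processor, producing
// one processed record per raw record.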
void AvcodecTaskManager::PrepareAudioBuffer(vector<sptr<FrameRecord>>& choosedBuffer,
    vector<sptr<AudioRecord>>& audioRecords, vector<sptr<AudioRecord>>& processedAudioRecords)
{
    int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
    if (audioCapturerSession_) {
        int64_t startTime = NanosecToMillisec(videoStartTime);
        int64_t endTime = NanosecToMillisec(choosedBuffer.back()->GetTimeStamp());
        audioCapturerSession_->GetAudioRecords(startTime, endTime, audioRecords);
        for (auto ptr : audioRecords) {
            processedAudioRecords.emplace_back(new AudioRecord(ptr->GetTimeStamp()));
        }
        audioCapturerSession_->GetAudioDeferredProcess()->Process(audioRecords, processedAudioRecords);
    }
}

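// Encodes the prepared audio records and writes them to the muxer's audio track. Each sample's
// pts is index * AUDIO_FRAME_INTERVAL and the last record is flagged as end-of-stream.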
void AvcodecTaskManager::CollectAudioBuffer(vector<sptr<AudioRecord>> audioRecordVec, sptr<AudioVideoMuxer> muxer)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("CollectAudioBuffer start with size %{public}zu", audioRecordVec.size());
    bool isEncodeSuccess = false;
    CHECK_ERROR_RETURN_LOG(!audioEncoder_ || audioRecordVec.empty() || !muxer,
        "CollectAudioBuffer cannot find useful data");
    isEncodeSuccess = audioEncoder_->EncodeAudioBuffer(audioRecordVec);
    MEDIA_DEBUG_LOG("encode audio buffer result %{public}d", isEncodeSuccess);
    size_t maxFrameCount = std::min(audioRecordVec.size(), MAX_AUDIO_FRAME_COUNT);
    for (size_t index = 0; index < maxFrameCount; index++) {
        OH_AVCodecBufferAttr attr = { 0, 0, 0, AVCODEC_BUFFER_FLAGS_NONE };
        OH_AVBuffer* buffer = audioRecordVec[index]->encodedBuffer;
        CHECK_AND_CONTINUE_LOG(buffer != nullptr, "audio encodedBuffer is null");
        OH_AVBuffer_GetBufferAttr(buffer, &attr);
        attr.pts = static_cast<int64_t>(index * AUDIO_FRAME_INTERVAL);
        if (index == audioRecordVec.size() - 1) {
            attr.flags = AVCODEC_BUFFER_FLAGS_EOS;
        }
        OH_AVBuffer_SetBufferAttr(buffer, &attr);
        muxer->WriteSampleBuffer(buffer->buffer_, AUDIO_TRACK);
    }
    MEDIA_INFO_LOG("CollectAudioBuffer finished");
}

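// Releases both encoders and deletes any photo asset proxies that were registered via SetVideoFd()
// but never consumed by a muxing task.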
void AvcodecTaskManager::Release()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager release start");
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Release();
    }
    if (audioEncoder_ != nullptr) {
        audioEncoder_->Release();
    }
    unique_lock<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("AvcodecTaskManager::Release videoFdMap_ size is %{public}zu", videoFdMap_.size());
    for (const auto& videoFdPair : videoFdMap_) {
        PhotoAssetIntf* photoAssetProxy = videoFdPair.second.second;
        if (photoAssetProxy) {
            delete photoAssetProxy;
        }
    }
    videoFdMap_.clear();
    MEDIA_INFO_LOG("AvcodecTaskManager release end");
}

void AvcodecTaskManager::Stop()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager Stop start");
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Stop();
    }
    if (audioEncoder_ != nullptr) {
        audioEncoder_->Stop();
    }
    MEDIA_INFO_LOG("AvcodecTaskManager Stop end");
}

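// Marks the manager inactive, cancels all pending tasks on both task managers and drops them,
// and clears the per-capture deblur start/end time maps.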
void AvcodecTaskManager::ClearTaskResource()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource start");
    {
        lock_guard<mutex> lock(taskManagerMutex_);
        isActive_ = false;
        if (taskManager_ != nullptr) {
            taskManager_->CancelAllTasks();
            taskManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(encoderManagerMutex_);
        isActive_ = false;
        if (videoEncoderManager_ != nullptr) {
            videoEncoderManager_->CancelAllTasks();
            videoEncoderManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(startTimeMutex_);
        mPStartTimeMap_.clear();
    }
    {
        lock_guard<mutex> lock(endTimeMutex_);
        mPEndTimeMap_.clear();
    }
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource end");
}

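// Converts the requested pre/post capture frame counts into buffer durations, assuming ONE_BILLION
// nanoseconds per second (count * ONE_BILLION / VIDEO_FRAME_RATE); the results drive
// FindIdrFrameIndex() and ChooseVideoBuffer().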
void AvcodecTaskManager::SetVideoBufferDuration(uint32_t preBufferCount, uint32_t postBufferCount)
{
    MEDIA_INFO_LOG("AvcodecTaskManager SetVideoBufferDuration enter");
    preBufferDuration_ = static_cast<int64_t>(preBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
    postBufferDuration_ = static_cast<int64_t>(postBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
}
} // namespace CameraStandard
} // namespace OHOS