/*
 * Copyright (c) 2020-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "camera_device.h"

#include <fcntl.h>
#include <pthread.h>
#include <string>
#include <sys/io.h>
#include <sys/prctl.h>
#include <sys/select.h>
#include <thread>
#include <unistd.h>
#include "codec_interface.h"
#include "display_layer.h"
#include "hal_camera.h"
#include "media_log.h"
#include "meta_data.h"
#include "securec.h"

#include <iostream>

using namespace OHOS;
using namespace OHOS::Media;
using namespace std;

/** Indicates that the current frame is an Instantaneous Decoder Refresh (IDR) frame. */
const int32_t KEY_IS_SYNC_FRAME = 1;
/** Indicates the frame timestamp. */
const int32_t KEY_TIME_US = 2;

const int32_t IMAGE_WIDTH = 3;        // "DATA_PIX_FORMAT"
const int32_t IMAGE_HEIGHT = 4;       // "DATA_PIX_FORMAT"
const int32_t IMAGE_SIZE = 5;         // "DATA_PIX_FORMAT"
const int32_t DELAY_TIME_ONE_FRAME = 30000;
const int32_t VIDEO_MAX_NUM = 2;      // "video max num"
const int32_t INVALID_STREAM_ID = -1;

namespace OHOS {
namespace Media {
extern Surface *g_surface;

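/* Maps the HAL image format of a stream to the matching codec MIME type; unknown formats yield MEDIA_MIMETYPE_INVALID. */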
AvCodecMime ConverFormat(ImageFormat format)
{
    if (format == FORMAT_JPEG) {
        return MEDIA_MIMETYPE_IMAGE_JPEG;
    } else if (format == FORMAT_AVC) {
        return MEDIA_MIMETYPE_VIDEO_AVC;
    } else if (format == FORMAT_HEVC) {
        return MEDIA_MIMETYPE_VIDEO_HEVC;
    } else {
        return MEDIA_MIMETYPE_INVALID;
    }
}

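/* Binds a video encoder to its input by passing the camera device ID to the codec via KEY_DEVICE_ID. */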
static int32_t SetVencSource(CODEC_HANDLETYPE codecHdl, uint32_t deviceId)
{
    Param param = {.key = KEY_DEVICE_ID, .val = (void *)&deviceId, .size = sizeof(uint32_t)};
    int32_t ret = CodecSetParameter(codecHdl, &param, 1);
    if (ret != 0) {
        MEDIA_ERR_LOG("Set enc source failed.(ret=%d)", ret);
        return ret;
    }
    return MEDIA_OK;
}

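/*
 * Picks a default encoding bitrate (in kbps) based on the frame resolution; a return
 * value of 0 lets the encoder calculate the bitrate itself.
 */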
static uint32_t GetDefaultBitrate(uint32_t width, uint32_t height)
{
    uint32_t rate; /* auto calc bitrate if set 0 */
    if (width * height == 640 * 360) { /* 640,width  360,height */
        rate = 0x800; /* 2048kbps */
    } else if (width * height == 1280 * 720) { /* 1280,width  720,height */
        rate = 0x400; /* 1024kbps */
    } else if (width * height >= 2560 * 1440 && width * height <= 2716 * 1524) { /* 2560,2716 width  1440,1524,height */
        rate = 0x1800; /* 6144kbps */
    } else if (width * height == 3840 * 2160 || width * height == 4096 * 2160) { /* 3840,4096 width  2160,height */
        rate = 0xa000; /* 40960kbps */
    } else {
        rate = 0x0;
    }
    return rate;
}

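/*
 * Creates and configures the record-stream encoder: codec type, MIME type, CBR rate
 * control, normal-P GOP mode, profile, resolution, frame rate and default bitrate, then
 * binds it to the camera device. On any failure the codec is destroyed and MEDIA_ERR
 * is returned.
 */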
static int32_t CameraCreateVideoEnc(FrameConfig &fc,
                                    StreamAttr stream,
                                    uint32_t srcDev,
                                    CODEC_HANDLETYPE *codecHdl)
{
    const uint32_t maxParamNum = 10;
    uint32_t paramIndex = 0;
    Param param[maxParamNum];

    CodecType domainKind = VIDEO_ENCODER;
    param[paramIndex].key = KEY_CODEC_TYPE;
    param[paramIndex].val = &domainKind;
    param[paramIndex].size = sizeof(CodecType);
    paramIndex++;

    AvCodecMime codecMime = ConverFormat(stream.format);
    param[paramIndex].key = KEY_MIMETYPE;
    param[paramIndex].val = &codecMime;
    param[paramIndex].size = sizeof(AvCodecMime);
    paramIndex++;

    VideoCodecRcMode rcMode = VID_CODEC_RC_CBR;
    param[paramIndex].key = KEY_VIDEO_RC_MODE;
    param[paramIndex].val = &rcMode;
    param[paramIndex].size = sizeof(VideoCodecRcMode);
    paramIndex++;

    VideoCodecGopMode gopMode = VID_CODEC_GOPMODE_NORMALP;
    param[paramIndex].key = KEY_VIDEO_GOP_MODE;
    param[paramIndex].val = &gopMode;
    param[paramIndex].size = sizeof(VideoCodecGopMode);
    paramIndex++;

    Profile profile = HEVC_MAIN_PROFILE;
    param[paramIndex].key = KEY_VIDEO_PROFILE;
    param[paramIndex].val = &profile;
    param[paramIndex].size = sizeof(Profile);
    paramIndex++;

#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
    uint32_t width = stream.width;
    uint32_t height = stream.height;
#else
    uint32_t width = g_surface->GetWidth();
    uint32_t height = g_surface->GetHeight();
#endif

    MEDIA_DEBUG_LOG("width=%u", width);
    param[paramIndex].key = KEY_VIDEO_WIDTH;
    param[paramIndex].val = &width;
    param[paramIndex].size = sizeof(uint32_t);
    paramIndex++;

    MEDIA_DEBUG_LOG("height=%u", height);
    param[paramIndex].key = KEY_VIDEO_HEIGHT;
    param[paramIndex].val = &height;
    param[paramIndex].size = sizeof(uint32_t);
    paramIndex++;

    uint32_t frameRate = stream.fps;
    MEDIA_DEBUG_LOG("frameRate=%u", frameRate);
    param[paramIndex].key = KEY_VIDEO_FRAME_RATE;
    param[paramIndex].val = &frameRate;
    param[paramIndex].size = sizeof(uint32_t);
    paramIndex++;

    uint32_t bitRate = GetDefaultBitrate(width, height);
    MEDIA_DEBUG_LOG("bitRate=%u kbps", bitRate);
    param[paramIndex].key = KEY_BITRATE;
    param[paramIndex].val = &bitRate;
    param[paramIndex].size = sizeof(uint32_t);
    paramIndex++;

    int32_t ret = CodecCreateByType(domainKind, codecMime, codecHdl);
    if (ret != 0) {
        MEDIA_ERR_LOG("Create video encoder failed.");
        return MEDIA_ERR;
    }

    ret = CodecSetParameter(*codecHdl, param, paramIndex);
    if (ret != 0) {
        CodecDestroy(*codecHdl);
        MEDIA_ERR_LOG("video CodecSetParameter failed.");
        return MEDIA_ERR;
    }

    ret = SetVencSource(*codecHdl, srcDev);
    if (ret != 0) {
        CodecDestroy(*codecHdl);
        return MEDIA_ERR;
    }

    return MEDIA_OK;
}

static void FillParam(Param &param, ParamKey key, uint8_t *data, uint32_t size)
{
    param.key = key;
    param.val = data;
    param.size = size;
}

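/*
 * Creates the capture encoder with the prepared parameter list, applies the optional JPEG
 * quality factor from the frame config, and binds the encoder to the camera device.
 * Returns the codec handle, or nullptr after destroying the codec on failure.
 */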
static CODEC_HANDLETYPE CameraCreateJpegEncProc(FrameConfig &fc, uint32_t srcDev, AvCodecMime codecMime,
    const Param* param, uint32_t paramNum)
{
    CODEC_HANDLETYPE codecHdl = nullptr;
    if (CodecCreateByType(VIDEO_ENCODER, codecMime, &codecHdl) != 0) {
        return nullptr;
    }

    int32_t ret = CodecSetParameter(codecHdl, param, paramNum);
    if (ret != 0) {
        CodecDestroy(codecHdl);
        return nullptr;
    }

    int32_t qfactor = -1;
    fc.GetParameter(PARAM_KEY_IMAGE_ENCODE_QFACTOR, qfactor);
    if (qfactor != -1) {
        Param jpegParam = {
            .key = KEY_IMAGE_Q_FACTOR,
            .val = &qfactor,
            .size = sizeof(qfactor)
        };
        ret = CodecSetParameter(codecHdl, &jpegParam, 1);
        if (ret != 0) {
            MEDIA_ERR_LOG("CodecSetParameter set jpeg qfactor failed.(ret=%d)", ret);
        }
    }

    ret = SetVencSource(codecHdl, srcDev);
    if (ret != 0) {
        MEDIA_ERR_LOG("Set video encoder source failed.");
        CodecDestroy(codecHdl);
        return nullptr;
    }
    return codecHdl;
}

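/*
 * Builds the parameter list for a single-shot capture encoder from the first surface of
 * the frame config (codec type, MIME type, resolution, plus rate control, profile and
 * frame rate when the format maps to HEVC) and creates the encoder through
 * CameraCreateJpegEncProc.
 */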
static int32_t CameraCreateJpegEnc(FrameConfig &fc, StreamAttr stream, uint32_t srcDev, CODEC_HANDLETYPE *codecHdl)
{
    const uint32_t maxParamNum = 10; /* 10 maxParamNum */
    Param param[maxParamNum];
    uint32_t paramIndex = 0;

    CodecType domainKind = VIDEO_ENCODER;
    FillParam(param[paramIndex], KEY_CODEC_TYPE, reinterpret_cast<uint8_t *>(&domainKind), sizeof(CodecType));
    paramIndex++;

    AvCodecMime codecMime = ConverFormat(stream.format);
    FillParam(param[paramIndex], KEY_MIMETYPE, reinterpret_cast<uint8_t *>(&codecMime), sizeof(AvCodecMime));
    paramIndex++;

    auto surfaceList = fc.GetSurfaces();
    Surface *surface = surfaceList.front();
    uint32_t width = surface->GetWidth();
    MEDIA_DEBUG_LOG("width=%u", width);
    FillParam(param[paramIndex], KEY_VIDEO_WIDTH, reinterpret_cast<uint8_t *>(&width), sizeof(uint32_t));
    paramIndex++;

    uint32_t height = surface->GetHeight();
    MEDIA_DEBUG_LOG("height=%u", height);
    FillParam(param[paramIndex], KEY_VIDEO_HEIGHT, reinterpret_cast<uint8_t *>(&height), sizeof(uint32_t));
    paramIndex++;
    if (codecMime == MEDIA_MIMETYPE_VIDEO_HEVC) {
        VideoCodecRcMode rcMode = VID_CODEC_RC_FIXQP;
        FillParam(param[paramIndex], KEY_VIDEO_RC_MODE, reinterpret_cast<uint8_t *>(&rcMode), sizeof(VideoCodecRcMode));
        paramIndex++;

        Profile profile = HEVC_MAIN_PROFILE;
        FillParam(param[paramIndex], KEY_VIDEO_PROFILE, reinterpret_cast<uint8_t *>(&profile), sizeof(Profile));
        paramIndex++;

        uint32_t frameRate = stream.fps;
        FillParam(param[paramIndex], KEY_VIDEO_FRAME_RATE, reinterpret_cast<uint8_t *>(&frameRate), sizeof(uint32_t));
        paramIndex++;
    }
    *codecHdl = CameraCreateJpegEncProc(fc, srcDev, codecMime, param, paramIndex);
    return (*codecHdl != nullptr) ? MEDIA_OK : MEDIA_ERR;
}

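/*
 * Copies every packet of an encoded codec buffer into one contiguous destination buffer;
 * on return, *size holds the remaining free space of the destination.
 */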
static int32_t CopyCodecOutput(uint8_t *dst, uint32_t *size, CodecBuffer *buffer)
{
    if (dst == nullptr || size == nullptr || buffer == nullptr) {
        return MEDIA_ERR;
    }
    char *dstBuf = reinterpret_cast<char *>(dst);
    for (uint32_t i = 0; i < buffer->bufferCnt; i++) {
        uint32_t packSize = buffer->buffer[i].length - buffer->buffer[i].offset;
        errno_t ret = memcpy_s(dstBuf, *size, (void *)(buffer->buffer[i].buf + buffer->buffer[i].offset), packSize);
        if (ret != EOK) {
            return MEDIA_ERR;
        }
        *size -= packSize;
        dstBuf += packSize;
    }
    return MEDIA_OK;
}

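/*
 * Fills a StreamAttr for the given stream type with the surface resolution and the
 * format, fps, invert-mode and crop parameters carried by the frame config.
 */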
static void StreamAttrInitialize(StreamAttr *streamAttr, Surface *surface,
                                 StreamType streamType, FrameConfig &fc)
{
    if (streamAttr == nullptr || surface == nullptr) {
        return;
    }
    (void)memset_s(streamAttr, sizeof(StreamAttr), 0, sizeof(StreamAttr));
    streamAttr->type = streamType;
    fc.GetParameter(CAM_IMAGE_FORMAT, streamAttr->format);
    streamAttr->width = surface->GetWidth();
    streamAttr->height = surface->GetHeight();
    fc.GetParameter(CAM_FRAME_FPS, streamAttr->fps);
    fc.GetParameter(CAM_IMAGE_INVERT_MODE, streamAttr->invertMode);
    fc.GetParameter(CAM_IMAGE_CROP_RECT, streamAttr->crop);
}

static ImageFormat Convert2HalImageFormat(uint32_t format)
{
    if (format == CAM_IMAGE_RAW12) {
        return FORMAT_RGB_BAYER_12BPP;
    }
    return FORMAT_YVU420;
}

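/*
 * Stores the number of bytes actually written into the surface buffer (total surface size
 * minus the remaining space) and flushes it to the consumer; on flush failure the buffer
 * is cancelled and -1 is returned.
 */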
static int32_t SurfaceSetSize(SurfaceBuffer* surfaceBuf, Surface* surface, uint32_t size)
{
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
    surfaceBuf->SetSize(surface->GetSize() - size);
    if (surface->FlushBuffer(surfaceBuf) != 0) {
        MEDIA_ERR_LOG("Flush surface failed.");
        surface->CancelBuffer(surfaceBuf);
        return -1;
    }
#else
    surfaceBuf->SetSize(g_surface->GetSize() - size);
    if (g_surface->FlushBuffer(surfaceBuf) != 0) {
        MEDIA_ERR_LOG("Flush g_surface failed.");
        g_surface->CancelBuffer(surfaceBuf);
        return -1;
    }
#endif
    return 0;
}

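/*
 * Codec output callback for record streams: for each surface bound to the encoder it
 * requests a buffer, copies the encoded packets into it, tags it with the sync-frame flag
 * and timestamp, flushes it to the consumer, and finally returns the codec buffer to the
 * encoder output queue.
 */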
int32_t RecordAssistant::OnVencBufferAvailble(UINTPTR userData, CodecBuffer* outBuf, int32_t *acquireFd)
{
    (void)acquireFd;
    CodecDesc* codecInfo = reinterpret_cast<CodecDesc *>(userData);
    list<Surface*> *surfaceList = &codecInfo->vencSurfaces_;
    if (surfaceList == nullptr || surfaceList->empty()) {
        MEDIA_ERR_LOG("Encoder handle is illegal.");
        return MEDIA_ERR;
    }
    int32_t ret = -1;
    for (auto &surface : *surfaceList) {
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
        SurfaceBuffer *surfaceBuf = surface->RequestBuffer();
#else
        SurfaceBuffer *surfaceBuf = g_surface->RequestBuffer();
#endif
        if (surfaceBuf == nullptr) {
            MEDIA_ERR_LOG("No available buffer in surface.");
            break;
        }
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
        uint32_t size = surface->GetSize();
#else
        uint32_t size = g_surface->GetSize();
#endif
        void *buf = surfaceBuf->GetVirAddr();
        if (buf == nullptr) {
            MEDIA_ERR_LOG("Invalid buffer address.");
            break;
        }
        ret = CopyCodecOutput((uint8_t*)buf, &size, outBuf);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("No available outBuf in surface.");
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
            surface->CancelBuffer(surfaceBuf);
#else
            g_surface->CancelBuffer(surfaceBuf);
#endif
            break;
        }
        surfaceBuf->SetInt32(KEY_IS_SYNC_FRAME, (((outBuf->flag & STREAM_FLAG_KEYFRAME) == 0) ? 0 : 1));
        surfaceBuf->SetInt64(KEY_TIME_US, outBuf->timeStamp);
        ret = SurfaceSetSize(surfaceBuf, surface, size);
        if (ret != 0) {
            break;
        }
    }
    if (CodecQueueOutput(codecInfo->vencHdl_, outBuf, 0, -1) != 0) {
        MEDIA_ERR_LOG("Codec queue output failed.");
    }
    return ret;
}

CodecCallback RecordAssistant::recordCodecCb_ = {nullptr, nullptr, RecordAssistant::OnVencBufferAvailble};

void RecordAssistant::ClearFrameConfig()
{
    for (uint32_t i = 0; i < codecInfo_.size(); i++) {
        CodecStop(codecInfo_[i].vencHdl_);
        CodecDestroy(codecInfo_[i].vencHdl_);
    }
    codecInfo_.clear();
}

int32_t RecordAssistant::SetFrameConfigEnd(int32_t result)
{
    if (result != MEDIA_OK) {
        for (uint32_t i = 0; i < codecInfo_.size(); i++) {
            CodecDestroy(codecInfo_[i].vencHdl_);
        }
        codecInfo_.clear();
        return result;
    }
    for (uint32_t i = 0; i < codecInfo_.size(); i++) {
        result = CodecSetCallback(codecInfo_[i].vencHdl_, &recordCodecCb_, reinterpret_cast<UINTPTR>(&codecInfo_[i]));
        if (result != 0) {
            MEDIA_ERR_LOG("set CodecSetCallback failed ret:%d", result);
            break;
        }
    }

    if (result == MEDIA_OK) {
        state_ = LOOP_READY;
    } else {
        for (uint32_t i = 0; i < codecInfo_.size(); i++) {
            CodecDestroy(codecInfo_[i].vencHdl_);
        }
        codecInfo_.clear();
    }
    return result;
}

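/*
 * Creates a video stream and a video encoder for each surface in the frame config
 * (at most VIDEO_MAX_NUM), forwards the vendor-private stream info to the HAL and records
 * the created stream IDs; SetFrameConfigEnd then registers the encoder callbacks or
 * rolls everything back on failure.
 */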
int32_t RecordAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
{
    fc_ = &fc;
    auto surfaceList = fc.GetSurfaces();
    if (surfaceList.size() > VIDEO_MAX_NUM || surfaceList.size() == 0) {
        MEDIA_ERR_LOG("The number of surfaces in the frame config must be 1 or 2 now.");
        return MEDIA_ERR;
    }
    uint32_t num = 0;
    int32_t ret = MEDIA_OK;
    for (auto &surface : surfaceList) {
        CODEC_HANDLETYPE codecHdl = nullptr;
        StreamAttr stream = {};
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
        StreamAttrInitialize(&stream, surface, STREAM_VIDEO, fc);
#else
        StreamAttrInitialize(&stream, g_surface, STREAM_VIDEO, fc);
#endif
        ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("Create recorder stream failed.");
            ClearFrameConfig();
            break;
        }
        streamId_ = *streamId;
        streamIdNum_[num] = *streamId;
        num++;

        StreamInfo streamInfo;
        streamInfo.type = STERAM_INFO_PRIVATE;
        fc.GetVendorParameter(streamInfo.u.data, PRIVATE_TAG_LEN);
        HalCameraStreamSetInfo(cameraId_, *streamId, &streamInfo);

        uint32_t deviceId = 0;
        HalCameraGetDeviceId(cameraId_, *streamId, &deviceId);
        ret = CameraCreateVideoEnc(fc, stream, deviceId, &codecHdl);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("Cannot create suitable video encoder.");
            ClearFrameConfig();
            break;
        }
#if (!defined(__LINUX__)) || (defined(ENABLE_PASSTHROUGH_MODE))
        list<Surface*> conList({surface});
#else
        list<Surface*> conList({g_surface});
#endif
        CodecDesc info;
        info.vencHdl_ = codecHdl;
        info.vencSurfaces_ = conList;
        codecInfo_.emplace_back(info);
    }
    return SetFrameConfigEnd(ret);
}

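/*
 * Turns the record stream on and starts every encoder created in SetFrameConfig;
 * if one encoder fails to start, the ones already started are stopped again.
 */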
int32_t RecordAssistant::Start(uint32_t streamId)
{
    if (state_ != LOOP_READY) {
        return MEDIA_ERR;
    }
    HalCameraStreamOn(cameraId_, streamId);
    int32_t ret = MEDIA_OK;
    int32_t i;
    for (i = 0; static_cast<uint32_t>(i) < codecInfo_.size(); i++) {
        ret = CodecStart(codecInfo_[i].vencHdl_);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("Video encoder start failed.");
            ret = MEDIA_ERR;
            break;
        }
    }
    if (ret == MEDIA_ERR) {
        /* rollback */
        for (; i >= 0; i--) {
            CodecStop(codecInfo_[i].vencHdl_);
        }
        return MEDIA_ERR;
    }
    state_ = LOOP_LOOPING;
    MEDIA_INFO_LOG("Start camera recording succeeded.");
    return MEDIA_OK;
}

int32_t RecordAssistant::Stop()
{
    if (state_ != LOOP_LOOPING) {
        return MEDIA_ERR;
    }
    ClearFrameConfig();
    for (uint32_t i = 0; i < VIDEO_MAX_NUM; i++) {
        if (streamIdNum_[i] != INVALID_STREAM_ID) {
            HalCameraStreamOff(cameraId_, streamIdNum_[i]);
            HalCameraStreamDestroy(cameraId_, streamIdNum_[i]);
        }
        streamIdNum_[i] = INVALID_STREAM_ID;
    }
    state_ = LOOP_STOP;
    return MEDIA_OK;
}

void* PreviewAssistant::YuvCopyProcess(void *arg)
{
    (void)arg;
    return nullptr;
}

static void GetSurfaceRect(Surface *surface, IRect *attr)
{
    attr->x = std::stoi(surface->GetUserData(string("region_position_x")));
    attr->y = std::stoi(surface->GetUserData(string("region_position_y")));
    attr->w = std::stoi(surface->GetUserData(string("region_width")));
    attr->h = std::stoi(surface->GetUserData(string("region_height")));
}

int32_t PreviewAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
{
    fc_ = &fc;
    auto surfaceList = fc.GetSurfaces();
    if (surfaceList.size() != 1) {
        MEDIA_ERR_LOG("Only support one surface in frame config now.");
        return MEDIA_ERR;
    }
    Surface *surface = surfaceList.front();
    StreamAttr stream = {};
    StreamAttrInitialize(&stream, surface, STREAM_PREVIEW, fc);
    int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
    if (ret != MEDIA_OK) {
        MEDIA_ERR_LOG("Create preview stream failed.");
        return MEDIA_ERR;
    }
    StreamInfo streamInfo;
    streamInfo.type = STREAM_INFO_POS;
    streamInfo.u.pos.x = std::stoi(surface->GetUserData(string("region_position_x")));
    streamInfo.u.pos.y = std::stoi(surface->GetUserData(string("region_position_y")));

    HalCameraStreamSetInfo(cameraId_, *streamId, &streamInfo);
    streamId_ = *streamId;
    return MEDIA_OK;
}

int32_t PreviewAssistant::Start(uint32_t streamId)
{
    if (state_ == LOOP_LOOPING) {
        return MEDIA_ERR;
    }
    state_ = LOOP_LOOPING;

    int32_t retCode = pthread_create(&threadId, nullptr, YuvCopyProcess, this);
    if (retCode != 0) {
        MEDIA_ERR_LOG("Create YuvCopyProcess thread failed: %d.", retCode);
    }

    int32_t ret = HalCameraStreamOn(cameraId_, streamId);
    if (ret != MEDIA_OK) {
        MEDIA_ERR_LOG("HalCameraStreamOn failed when starting preview.(ret=%d)", ret);
        Stop();
        return MEDIA_ERR;
    }
    return MEDIA_OK;
}

int32_t PreviewAssistant::Stop()
{
    if (state_ != LOOP_LOOPING) {
        return MEDIA_ERR;
    }
    state_ = LOOP_STOP;
    pthread_join(threadId, NULL);
    HalCameraStreamOff(cameraId_, streamId_);
    HalCameraStreamDestroy(cameraId_, streamId_);
    return MEDIA_OK;
}

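/*
 * Prepares a single-shot capture: creates the capture stream for the only surface in the
 * frame config and builds the matching capture encoder bound to the camera device.
 */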
int32_t CaptureAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
{
    auto surfaceList = fc.GetSurfaces();
    if (surfaceList.size() != 1) {
        MEDIA_ERR_LOG("Only support one surface in frame config now.");
        return MEDIA_ERR;
    }
    Surface *surface = surfaceList.front();

    StreamAttr stream = {};
    StreamAttrInitialize(&stream, surface, STREAM_CAPTURE, fc);

    uint32_t deviceId = 0;
    int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
    if (ret != MEDIA_OK) {
        MEDIA_ERR_LOG("Create capture stream failed.");
        return MEDIA_ERR;
    }
    streamId_ = *streamId;
    HalCameraGetDeviceId(cameraId_, *streamId, &deviceId);
    ret = CameraCreateJpegEnc(fc, stream, deviceId, &vencHdl_);
    if (ret != MEDIA_OK) {
        MEDIA_ERR_LOG("Create capture venc failed.");
        return MEDIA_ERR;
    }

    capSurface_ = surface;
    state_ = LOOP_READY;
    return MEDIA_OK;
}

/* Blocking method: waits until the capture is completed */
int32_t CaptureAssistant::Start(uint32_t streamId)
{
    state_ = LOOP_LOOPING;
    HalCameraStreamOn(cameraId_, streamId);
    int32_t ret = CodecStart(vencHdl_);
    if (ret != 0) {
        MEDIA_ERR_LOG("Start capture encoder failed.(ret=%d)", ret);
        state_ = LOOP_STOP;
        return MEDIA_ERR;
    }

    CodecBuffer* outInfo =
        reinterpret_cast<CodecBuffer*>(new char[sizeof(CodecBuffer) + sizeof(CodecBufferInfo) * 3]); /* 3 buffCnt */
    if (outInfo == nullptr) {
        MEDIA_ERR_LOG("Allocate dequeue buffer failed!");
        return MEDIA_ERR;
    }
    SurfaceBuffer *surfaceBuf = nullptr;
    do {
        if (memset_s(outInfo, sizeof(CodecBuffer) + sizeof(CodecBufferInfo) * 3, 0,
            sizeof(CodecBuffer) + sizeof(CodecBufferInfo) * 3) != MEDIA_OK) { /* 3 buffCnt */
            MEDIA_ERR_LOG("memset_s failed!");
            delete[] reinterpret_cast<char *>(outInfo);
            return MEDIA_ERR;
        }
        outInfo->bufferCnt = 3; /* 3 buffCnt */
        ret = CodecDequeueOutput(vencHdl_, 0, nullptr, outInfo);
        if (ret != 0) {
            MEDIA_ERR_LOG("Dequeue capture frame failed.(ret=%d)", ret);
            break;
        }

        surfaceBuf = capSurface_->RequestBuffer();
        if (surfaceBuf == nullptr) {
            break;
        }

        uint32_t size = capSurface_->GetSize();
        void *buf = surfaceBuf->GetVirAddr();
        if (buf == nullptr) {
            MEDIA_ERR_LOG("Invalid buffer address.");
            break;
        }
        if (CopyCodecOutput((uint8_t*)buf, &size, outInfo) != MEDIA_OK) {
            MEDIA_ERR_LOG("No available buffer in capSurface_.");
            break;
        }
        surfaceBuf->SetSize(capSurface_->GetSize() - size);
        if (capSurface_->FlushBuffer(surfaceBuf) != 0) {
            MEDIA_ERR_LOG("Flush surface buffer failed.");
            break;
        }
    } while (0);

    CodecStop(vencHdl_);
    CodecDestroy(vencHdl_);
    HalCameraStreamOff(cameraId_, streamId);
    HalCameraStreamDestroy(cameraId_, streamId);
    delete[] reinterpret_cast<char *>(outInfo);
    outInfo = nullptr;
    state_ = LOOP_STOP;

    return ret;
}

int32_t CaptureAssistant::Stop()
{
    MEDIA_DEBUG_LOG("Unsupported method.");
    return MEDIA_OK;
}

int32_t CallbackAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
{
    fc_ = &fc;
    auto surfaceList = fc.GetSurfaces();
    if (surfaceList.size() != 1) {
        MEDIA_ERR_LOG("Only support one surface in frame config now.");
        return MEDIA_ERR;
    }
    uint32_t imageFormat = 0;
    fc.GetParameter(CAM_IMAGE_FORMAT, imageFormat);
    ImageFormat halImageFormat = Convert2HalImageFormat(imageFormat);
    MEDIA_INFO_LOG("Image format is %u", imageFormat);
    Surface *surface = surfaceList.front();
    StreamAttr stream = {};
    StreamAttrInitialize(&stream, surface, STREAM_CALLBACK, fc);
    stream.format = halImageFormat;
    int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
    if (ret != MEDIA_OK) {
        MEDIA_ERR_LOG("Create callback stream failed.");
        return MEDIA_ERR;
    }
    streamId_ = *streamId;
    capSurface_ = surface;
    state_ = LOOP_READY;
    return MEDIA_OK;
}

int32_t CallbackAssistant::Start(uint32_t streamId)
{
    if (state_ == LOOP_LOOPING) {
        return MEDIA_ERR;
    }
    state_ = LOOP_LOOPING;
    int32_t retCode = pthread_create(&threadId, nullptr, StreamCopyProcess, this);
    if (retCode != 0) {
        MEDIA_ERR_LOG("Create StreamCopyProcess thread failed: %d.", retCode);
    }
    HalCameraStreamOn(cameraId_, streamId);
    return MEDIA_OK;
}

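/*
 * Worker thread of the callback stream: while looping it requests a surface buffer, lets
 * the HAL fill it through HalCameraDequeueBuf, flushes it to the consumer, and returns the
 * previously dequeued HAL buffer on the next iteration or at thread exit.
 */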
void* CallbackAssistant::StreamCopyProcess(void *arg)
{
    CallbackAssistant *assistant = (CallbackAssistant *)arg;
    if (assistant == nullptr) {
        MEDIA_ERR_LOG("CallbackAssistant create failed.");
        return nullptr;
    }
    if (assistant->capSurface_ == nullptr) {
        MEDIA_ERR_LOG("capSurface_ is null.");
        return nullptr;
    }

    int32_t ret;
    HalBuffer streamBuffer;
    (void)memset_s(&streamBuffer, sizeof(HalBuffer), 0, sizeof(HalBuffer));
    while (assistant->state_ == LOOP_LOOPING) {
        SurfaceBuffer *surfaceBuf = assistant->capSurface_->RequestBuffer();
        if (surfaceBuf == nullptr) {
            usleep(DELAY_TIME_ONE_FRAME);
            continue;
        }

        if (streamBuffer.size != 0x0) {
            HalCameraQueueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
            (void)memset_s(&streamBuffer, sizeof(HalBuffer), 0, sizeof(HalBuffer));
        }
        streamBuffer.format = FORMAT_PRIVATE;
        streamBuffer.size = assistant->capSurface_->GetSize();
        if (surfaceBuf->GetVirAddr() == nullptr) {
            MEDIA_ERR_LOG("Invalid buffer address.");
            break;
        }
        streamBuffer.virAddr = surfaceBuf->GetVirAddr();

        ret = HalCameraDequeueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
        if (ret != MEDIA_OK) {
            usleep(DELAY_TIME_ONE_FRAME);
            continue;
        }

        if (assistant->capSurface_->FlushBuffer(surfaceBuf) != 0) {
            MEDIA_ERR_LOG("Flush surface failed.");
            assistant->capSurface_->CancelBuffer(surfaceBuf);
            break;
        }
        usleep(DELAY_TIME_ONE_FRAME);
    }
    if (streamBuffer.size != 0x0) {
        HalCameraQueueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
    }
    MEDIA_DEBUG_LOG("Stream copy thread exits.");
    return nullptr;
}

int32_t CallbackAssistant::Stop()
{
    if (state_ != LOOP_LOOPING) {
        return MEDIA_ERR;
    }
    state_ = LOOP_STOP;
    pthread_join(threadId, NULL);
    HalCameraStreamOff(cameraId_, streamId_);
    HalCameraStreamDestroy(cameraId_, streamId_);
    return MEDIA_OK;
}

CameraDevice::CameraDevice() {}

CameraDevice::CameraDevice(uint32_t cameraId)
{
    this->cameraId = cameraId;
}

CameraDevice::~CameraDevice() {}

int32_t CameraDevice::Initialize()
{
    // Needs to be refactored when the config file is removed
    int32_t ret = CodecInit();
    if (ret != 0) {
        MEDIA_ERR_LOG("Codec module init failed.(ret=%d)", ret);
        return MEDIA_ERR;
    }
    MEDIA_INFO_LOG("Codec module init succeeded.");
    captureAssistant_.state_ = LOOP_READY;
    previewAssistant_.state_ = LOOP_READY;
    recordAssistant_.state_ = LOOP_READY;
    callbackAssistant_.state_ = LOOP_READY;
    captureAssistant_.cameraId_ = cameraId;
    previewAssistant_.cameraId_ = cameraId;
    recordAssistant_.cameraId_ = cameraId;
    callbackAssistant_.cameraId_ = cameraId;
    return MEDIA_OK;
}

int32_t CameraDevice::UnInitialize()
{
    return MEDIA_OK;
}

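/*
 * Routes the frame config to the matching assistant (record, preview, capture or callback),
 * rejects requests while the assistant is idle, already looping or in error, and then runs
 * the assistant's SetFrameConfig and Start; for capture configs this is repeated once per
 * buffer in the surface queue.
 */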
int32_t CameraDevice::TriggerLoopingCapture(FrameConfig &fc, uint32_t *streamId)
{
    MEDIA_DEBUG_LOG("Camera device start looping capture.");
    DeviceAssistant *assistant = nullptr;
    int32_t fcType = fc.GetFrameConfigType();
    switch (fcType) {
        case FRAME_CONFIG_RECORD:
            assistant = &recordAssistant_;
            break;
        case FRAME_CONFIG_PREVIEW:
            assistant = &previewAssistant_;
            break;
        case FRAME_CONFIG_CAPTURE:
            assistant = &captureAssistant_;
            break;
        case FRAME_CONFIG_CALLBACK:
            assistant = &callbackAssistant_;
            break;
        default:
            break;
    }
    if (assistant == nullptr) {
        MEDIA_ERR_LOG("Invalid frame config type.(type=%d)", fcType);
        return MEDIA_ERR;
    }
    if (assistant->state_ == LOOP_IDLE || assistant->state_ == LOOP_LOOPING || assistant->state_ == LOOP_ERROR) {
        MEDIA_ERR_LOG("Device state is %d, cannot start looping capture.", assistant->state_);
        return MEDIA_ERR;
    }
    uint8_t count = 1;
    if (fcType == FRAME_CONFIG_CAPTURE) {
        auto surfaceList = fc.GetSurfaces();
        if (surfaceList.size() != 1) {
            MEDIA_ERR_LOG("Only support one surface in frame config now");
            return MEDIA_ERR;
        }
        Surface* surface = surfaceList.front();
        count = surface->GetQueueSize();
    }

    do {
        int32_t ret = assistant->SetFrameConfig(fc, streamId);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("Check and set frame config failed (ret=%d)", ret);
            return MEDIA_ERR;
        }
        ret = assistant->Start(*streamId);
        if (ret != MEDIA_OK) {
            MEDIA_ERR_LOG("Start looping capture failed (ret=%d)", ret);
            return MEDIA_ERR;
        }
    } while (--count);
    return MEDIA_OK;
}

void CameraDevice::StopLoopingCapture(int32_t type)
{
    MEDIA_INFO_LOG("Stop looping capture in camera_device.cpp");

    switch (type) {
        case FRAME_CONFIG_RECORD:
            MEDIA_INFO_LOG("Stop recorder");
            recordAssistant_.Stop();
            break;
        case FRAME_CONFIG_PREVIEW:
            MEDIA_INFO_LOG("Stop preview");
            previewAssistant_.Stop();
            break;
        case FRAME_CONFIG_CALLBACK:
            MEDIA_INFO_LOG("Stop callback");
            callbackAssistant_.Stop();
            break;
        default:
            MEDIA_INFO_LOG("Stop all");
            previewAssistant_.Stop();
            recordAssistant_.Stop();
            callbackAssistant_.Stop();
            break;
    }
}

int32_t CameraDevice::TriggerSingleCapture(FrameConfig &fc, uint32_t *streamId)
{
    return TriggerLoopingCapture(fc, streamId);
}

int32_t CameraDevice::SetCameraConfig()
{
    return MEDIA_OK;
}
} // namespace Media
} // namespace OHOS