/*
 * Copyright (C) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codec_utils.h"
#include "avcodec_log.h"
#include "media_description.h"

namespace OHOS {
namespace MediaAVCodec {
namespace Codec {
namespace {
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN_FRAMEWORK, "FCodec"};
constexpr uint32_t INDEX_ARRAY = 2;
std::map<VideoPixelFormat, AVPixelFormat> g_pixelFormatMap = {
    {VideoPixelFormat::YUVI420, AV_PIX_FMT_YUV420P},
    {VideoPixelFormat::NV12, AV_PIX_FMT_NV12},
    {VideoPixelFormat::NV21, AV_PIX_FMT_NV21},
    {VideoPixelFormat::RGBA, AV_PIX_FMT_RGBA},
};
} // namespace

using namespace OHOS::Media;

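// Converts a decoded FFmpeg AVFrame into dstPixFmt. A Scale context is created lazily on the
// first call, using the frame's own width and height, and reused for subsequent frames.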
int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale, std::shared_ptr<AVFrame> frame, uint8_t **dstData,
                          int32_t *dstLineSize, AVPixelFormat dstPixFmt)
{
    if (*scale == nullptr) {
        *scale = std::make_shared<Scale>();
        ScalePara scalePara{static_cast<int32_t>(frame->width),        static_cast<int32_t>(frame->height),
                            static_cast<AVPixelFormat>(frame->format), static_cast<int32_t>(frame->width),
                            static_cast<int32_t>(frame->height),       dstPixFmt};
        CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
                                 "Scale init error");
    }
    return (*scale)->Convert(frame->data, frame->linesize, dstData, dstLineSize);
}

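// Copies YUV pixel data into the output memory row by row, padding each row to the destination
// stride. Only YUVI420, NV12 and NV21 are accepted; the chroma planes of YUVI420 use half the
// stride, while the interleaved UV plane of NV12/NV21 uses the full stride.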
int32_t WriteYuvDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                           int32_t stride, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
    CHECK_AND_RETURN_RET_LOG(pixFmt == VideoPixelFormat::YUVI420 || pixFmt == VideoPixelFormat::NV12 ||
                                 pixFmt == VideoPixelFormat::NV21,
                             AVCS_ERR_UNSUPPORT, "pixFmt: %{public}d is not supported", pixFmt);
    int32_t srcPos = 0;
    int32_t dstPos = 0;
    int32_t dataSize = scaleLineSize[0];
    int32_t writeSize = dataSize > stride ? stride : dataSize;
    for (int32_t rowNum = 0; rowNum < height; rowNum++) {
        memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
        dstPos += stride;
        srcPos += dataSize;
    }
    srcPos = 0;
    if (pixFmt == VideoPixelFormat::YUVI420) {
        dataSize = scaleLineSize[1];
        writeSize = dataSize > (stride / UV_SCALE_FACTOR) ? (stride / UV_SCALE_FACTOR) : dataSize;
        for (int32_t rowNum = 0; rowNum < (height / UV_SCALE_FACTOR); rowNum++) {
            memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += (stride / UV_SCALE_FACTOR);
            srcPos += dataSize;
        }
        srcPos = 0;
        for (int32_t rowNum = 0; rowNum < (height / UV_SCALE_FACTOR); rowNum++) {
            memory->Write(scaleData[INDEX_ARRAY] + srcPos, writeSize, dstPos);
            dstPos += (stride / UV_SCALE_FACTOR);
            srcPos += dataSize;
        }
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        dataSize = scaleLineSize[1];
        writeSize = dataSize > stride ? stride : dataSize;
        for (int32_t rowNum = 0; rowNum < (height / UV_SCALE_FACTOR); rowNum++) {
            memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += stride;
            srcPos += dataSize;
        }
    }
    AVCODEC_LOGD("WriteYuvDataStride success");
    return AVCS_ERR_OK;
}

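// Copies packed RGBA pixel data into the output memory row by row, padding each row to the
// destination stride.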
int32_t WriteRgbDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                           int32_t stride, const Format &format)
{
    int32_t height;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    int32_t srcPos = 0;
    int32_t dstPos = 0;
    int32_t dataSize = scaleLineSize[0];
    int32_t writeSize = dataSize > stride ? stride : dataSize;
    for (int32_t rowNum = 0; rowNum < height; rowNum++) {
        memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
        dstPos += stride;
        srcPos += dataSize;
    }

    AVCODEC_LOGD("WriteRgbDataStride success");
    return AVCS_ERR_OK;
}

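// Writes tightly packed YUV planes (no stride padding) into the output memory, after verifying
// that the buffer capacity can hold the whole frame.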
int32_t WriteYuvData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                     int32_t &height, VideoPixelFormat &pixFmt)
{
    int32_t ySize = static_cast<int32_t>(scaleLineSize[0] * height);      // luma (Y) plane size
    int32_t uvSize = static_cast<int32_t>(scaleLineSize[1] * height / 2); // one chroma plane (I420) or the UV plane (NV12/NV21)
    int32_t frameSize = 0;
    if (pixFmt == VideoPixelFormat::YUVI420) {
        frameSize = ySize + (uvSize * 2); // U plane + V plane
    } else if (pixFmt == VideoPixelFormat::NV21 || pixFmt == VideoPixelFormat::NV12) {
        frameSize = ySize + uvSize;
    }
    CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}d]",
                             memory->GetCapacity(), frameSize);
    if (pixFmt == VideoPixelFormat::YUVI420) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
        memory->Write(scaleData[2], uvSize); // V plane
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
    } else {
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

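// Writes a tightly packed RGBA frame (no stride padding) into the output memory, after verifying
// the buffer capacity.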
int32_t WriteRgbData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                     int32_t &height)
{
    int32_t frameSize = static_cast<int32_t>(scaleLineSize[0] * height);
    CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}d]",
                             memory->GetCapacity(), frameSize);
    memory->Write(scaleData[0], frameSize);
    return AVCS_ERR_OK;
}

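// Fills a surface-mode output buffer. Waits up to 100 ms on the buffer's fence, then chooses the
// strided or tightly packed write path depending on whether the surface stride is a multiple of
// the scaled luma line size.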
int32_t WriteSurfaceData(const std::shared_ptr<AVMemory> &memory, struct SurfaceInfo &surfaceInfo, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
    if (surfaceInfo.surfaceFence != nullptr) {
        surfaceInfo.surfaceFence->Wait(100); // 100ms
    }
    uint32_t yScaleLineSize = static_cast<uint32_t>(surfaceInfo.scaleLineSize[0]);
    if (IsYuvFormat(pixFmt)) {
        if (surfaceInfo.surfaceStride % yScaleLineSize) {
            return WriteYuvDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
                                      surfaceInfo.surfaceStride, format);
        }
        WriteYuvData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        if (surfaceInfo.surfaceStride % yScaleLineSize) {
            return WriteRgbDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
                                      surfaceInfo.surfaceStride, format);
        }
        WriteRgbData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

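// Fills a buffer-mode output. Falls back to the strided write path when the scaled line size is
// not a multiple of the frame width; for RGBA the destination stride is width * VIDEO_PIX_DEPTH_RGBA.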
int32_t WriteBufferData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, int32_t *scaleLineSize,
                        const Format &format)
{
    int32_t height;
    int32_t width;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);

    if (IsYuvFormat(pixFmt)) {
        if (scaleLineSize[0] % width) {
            return WriteYuvDataStride(memory, scaleData, scaleLineSize, width, format);
        }
        WriteYuvData(memory, scaleData, scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        if (scaleLineSize[0] % width) {
            return WriteRgbDataStride(memory, scaleData, scaleLineSize, width * VIDEO_PIX_DEPTH_RGBA, format);
        }
        WriteRgbData(memory, scaleData, scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

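// Converts an FFmpeg error code into a readable string via av_strerror().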
std::string AVStrError(int errnum)
{
    char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
    return std::string(errbuf);
}

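// Maps the decoder's VideoRotation to the surface GraphicTransformType. The 90 and 270 degree
// cases are swapped, presumably because the surface transform is applied in the opposite
// rotational direction.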
GraphicTransformType TranslateSurfaceRotation(const VideoRotation &rotation)
{
    switch (rotation) {
        case VideoRotation::VIDEO_ROTATION_90: {
            return GRAPHIC_ROTATE_270;
        }
        case VideoRotation::VIDEO_ROTATION_180: {
            return GRAPHIC_ROTATE_180;
        }
        case VideoRotation::VIDEO_ROTATION_270: {
            return GRAPHIC_ROTATE_90;
        }
        default:
            return GRAPHIC_ROTATE_NONE;
    }
}

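// Maps a VideoPixelFormat to the corresponding GraphicPixelFormat; unsupported formats map to
// GRAPHIC_PIXEL_FMT_BUTT.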
GraphicPixelFormat TranslateSurfaceFormat(const VideoPixelFormat &surfaceFormat)
{
    switch (surfaceFormat) {
        case VideoPixelFormat::YUVI420: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P;
        }
        case VideoPixelFormat::RGBA: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888;
        }
        case VideoPixelFormat::NV12: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
        }
        case VideoPixelFormat::NV21: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP;
        }
        default:
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BUTT;
    }
}

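// Looks up the VideoPixelFormat that corresponds to an FFmpeg pixel format in g_pixelFormatMap,
// returning UNKNOWN when there is no mapping.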
VideoPixelFormat ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.second == ffmpegPixelFormat; });
    return iter == g_pixelFormatMap.end() ? VideoPixelFormat::UNKNOWN : iter->first;
}

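// Reverse lookup: maps a VideoPixelFormat to its FFmpeg counterpart, or AV_PIX_FMT_NONE if unmapped.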
AVPixelFormat ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.first == pixelFormat; });
    return iter == g_pixelFormatMap.end() ? AV_PIX_FMT_NONE : iter->second;
}

bool IsYuvFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::YUVI420 || format == VideoPixelFormat::NV12 ||
            format == VideoPixelFormat::NV21);
}

bool IsRgbFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::RGBA);
}

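// Creates the libswscale context and allocates the destination image planes with av_image_alloc().
// Calling Init() again on an already initialized Scale only updates the stored parameters.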
int32_t Scale::Init(const ScalePara &scalePara, uint8_t **dstData, int32_t *dstLineSize)
{
    scalePara_ = scalePara;
    if (swsCtx_ != nullptr) {
        return AVCS_ERR_OK;
    }
    auto swsContext =
        sws_getContext(scalePara_.srcWidth, scalePara_.srcHeight, scalePara_.srcFfFmt, scalePara_.dstWidth,
                       scalePara_.dstHeight, scalePara_.dstFfFmt, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
    if (swsContext == nullptr) {
        return AVCS_ERR_UNKNOWN;
    }
    swsCtx_ = std::shared_ptr<SwsContext>(swsContext, [](struct SwsContext *ptr) {
        if (ptr != nullptr) {
            sws_freeContext(ptr);
        }
    });
    auto ret = av_image_alloc(dstData, dstLineSize, scalePara_.dstWidth, scalePara_.dstHeight, scalePara_.dstFfFmt,
                              scalePara_.align);
    if (ret < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    for (int32_t i = 0; dstLineSize[i] > 0; i++) {
        if (dstData[i] && !dstLineSize[i]) {
            return AVCS_ERR_UNKNOWN;
        }
    }
    return AVCS_ERR_OK;
}

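// Runs sws_scale() over the full source height, converting into the destination planes allocated
// by Init().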
int32_t Scale::Convert(uint8_t **srcData, const int32_t *srcLineSize, uint8_t **dstData, int32_t *dstLineSize)
{
    auto res = sws_scale(swsCtx_.get(), srcData, srcLineSize, 0, scalePara_.srcHeight, dstData, dstLineSize);
    if (res < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    return AVCS_ERR_OK;
}
} // namespace Codec
} // namespace MediaAVCodec
} // namespace OHOS