1 /*
2 * Copyright (C) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "codec_utils.h"
17 #include "avcodec_log.h"
18 #include "media_description.h"
19 namespace OHOS {
20 namespace MediaAVCodec {
21 namespace Codec {
namespace {
// HiLog label consumed by the AVCODEC_LOG* macros in this translation unit.
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN_FRAMEWORK, "FCodec"};
// Index of the third (V) plane in planar YUV scale-data arrays.
constexpr uint32_t INDEX_ARRAY = 2;
// Mapping between framework pixel formats and the corresponding FFmpeg
// formats; used for lookups in both directions below.
std::map<VideoPixelFormat, AVPixelFormat> g_pixelFormatMap = {
    {VideoPixelFormat::YUVI420, AV_PIX_FMT_YUV420P},
    {VideoPixelFormat::NV12, AV_PIX_FMT_NV12},
    {VideoPixelFormat::NV21, AV_PIX_FMT_NV21},
    {VideoPixelFormat::RGBA, AV_PIX_FMT_RGBA},
};
} // namespace
32
33 using namespace OHOS::Media;
ConvertVideoFrame(std::shared_ptr<Scale> * scale,std::shared_ptr<AVFrame> frame,uint8_t ** dstData,int32_t * dstLineSize,AVPixelFormat dstPixFmt)34 int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale, std::shared_ptr<AVFrame> frame, uint8_t **dstData,
35 int32_t *dstLineSize, AVPixelFormat dstPixFmt)
36 {
37 if (*scale == nullptr) {
38 *scale = std::make_shared<Scale>();
39 ScalePara scalePara{static_cast<int32_t>(frame->width), static_cast<int32_t>(frame->height),
40 static_cast<AVPixelFormat>(frame->format), static_cast<int32_t>(frame->width),
41 static_cast<int32_t>(frame->height), dstPixFmt};
42 CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
43 "Scale init error");
44 }
45 return (*scale)->Convert(frame->data, frame->linesize, dstData, dstLineSize);
46 }
47
ConvertVideoFrame(std::shared_ptr<Scale> * scale,uint8_t ** srcData,int32_t * srcLineSize,AVPixelFormat srcPixFmt,int32_t srcWidth,int32_t srcHeight,uint8_t ** dstData,int32_t * dstLineSize,AVPixelFormat dstPixFmt)48 int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale,
49 uint8_t **srcData, int32_t *srcLineSize, AVPixelFormat srcPixFmt,
50 int32_t srcWidth, int32_t srcHeight,
51 uint8_t **dstData, int32_t *dstLineSize, AVPixelFormat dstPixFmt)
52 {
53 if (*scale == nullptr) {
54 *scale = std::make_shared<Scale>();
55 ScalePara scalePara{srcWidth, srcHeight, srcPixFmt,
56 srcWidth, srcHeight, dstPixFmt};
57 CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
58 "Scale init error");
59 }
60 return (*scale)->Convert(srcData, srcLineSize, dstData, dstLineSize);
61 }
62
WriteYuvDataStride(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t stride,const Format & format)63 int32_t WriteYuvDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
64 int32_t stride, const Format &format)
65 {
66 int32_t height;
67 int32_t fmt;
68 format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
69 format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
70 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
71 CHECK_AND_RETURN_RET_LOG(pixFmt == VideoPixelFormat::YUVI420 || pixFmt == VideoPixelFormat::NV12 ||
72 pixFmt == VideoPixelFormat::NV21,
73 AVCS_ERR_UNSUPPORT, "pixFmt: %{public}d do not support", pixFmt);
74 int32_t srcPos = 0;
75 int32_t dstPos = 0;
76 int32_t dataSize = scaleLineSize[0];
77 int32_t writeSize = dataSize > stride ? stride : dataSize;
78 for (int32_t colNum = 0; colNum < height; colNum++) {
79 memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
80 dstPos += stride;
81 srcPos += dataSize;
82 }
83 srcPos = 0;
84 if (pixFmt == VideoPixelFormat::YUVI420) {
85 dataSize = scaleLineSize[1];
86 writeSize = dataSize > (stride / UV_SCALE_FACTOR) ? (stride / UV_SCALE_FACTOR) : dataSize;
87 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
88 memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
89 dstPos += (stride / UV_SCALE_FACTOR);
90 srcPos += dataSize;
91 }
92 srcPos = 0;
93 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
94 memory->Write(scaleData[INDEX_ARRAY] + srcPos, writeSize, dstPos);
95 dstPos += (stride / UV_SCALE_FACTOR);
96 srcPos += dataSize;
97 }
98 } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
99 dataSize = scaleLineSize[1];
100 writeSize = dataSize > stride ? stride : dataSize;
101 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
102 memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
103 dstPos += stride;
104 srcPos += dataSize;
105 }
106 }
107 AVCODEC_LOGD("WriteYuvDataStride success");
108 return AVCS_ERR_OK;
109 }
110
WriteRgbDataStride(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t stride,const Format & format)111 int32_t WriteRgbDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
112 int32_t stride, const Format &format)
113 {
114 int32_t height;
115 format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
116 int32_t srcPos = 0;
117 int32_t dstPos = 0;
118 int32_t dataSize = scaleLineSize[0];
119 int32_t writeSize = dataSize > stride ? stride : dataSize;
120 for (int32_t colNum = 0; colNum < height; colNum++) {
121 memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
122 dstPos += stride;
123 srcPos += dataSize;
124 }
125
126 AVCODEC_LOGD("WriteRgbDataStride success");
127 return AVCS_ERR_OK;
128 }
129
WriteYuvData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t & height,VideoPixelFormat & pixFmt)130 int32_t WriteYuvData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
131 int32_t &height, VideoPixelFormat &pixFmt)
132 {
133 int32_t ySize = static_cast<int32_t>(scaleLineSize[0] * height); // yuv420: 411 nv21
134 int32_t uvSize = static_cast<int32_t>(scaleLineSize[1] * height / 2); // 2
135 int32_t frameSize = 0;
136 if (pixFmt == VideoPixelFormat::YUVI420) {
137 frameSize = ySize + (uvSize * 2); // 2
138 } else if (pixFmt == VideoPixelFormat::NV21 || pixFmt == VideoPixelFormat::NV12) {
139 frameSize = ySize + uvSize;
140 }
141 CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
142 "output buffer size is not enough: real[%{public}d], need[%{public}u]",
143 memory->GetCapacity(), frameSize);
144 if (pixFmt == VideoPixelFormat::YUVI420) {
145 memory->Write(scaleData[0], ySize);
146 memory->Write(scaleData[1], uvSize);
147 memory->Write(scaleData[2], uvSize); // 2
148 } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
149 memory->Write(scaleData[0], ySize);
150 memory->Write(scaleData[1], uvSize);
151 } else {
152 return AVCS_ERR_UNSUPPORT;
153 }
154 return AVCS_ERR_OK;
155 }
156
WriteRgbData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t & height)157 int32_t WriteRgbData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
158 int32_t &height)
159 {
160 int32_t frameSize = static_cast<int32_t>(scaleLineSize[0] * height);
161 CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
162 "output buffer size is not enough: real[%{public}d], need[%{public}u]",
163 memory->GetCapacity(), frameSize);
164 memory->Write(scaleData[0], frameSize);
165 return AVCS_ERR_OK;
166 }
167
WriteSurfaceData(const std::shared_ptr<AVMemory> & memory,struct SurfaceInfo & surfaceInfo,const Format & format)168 int32_t WriteSurfaceData(const std::shared_ptr<AVMemory> &memory, struct SurfaceInfo &surfaceInfo, const Format &format)
169 {
170 int32_t height;
171 int32_t fmt;
172 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height) && height > 0,
173 AVCS_ERR_INVALID_VAL, "Invalid height %{public}d!", height);
174 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt) &&
175 fmt >= static_cast<int32_t>(VideoPixelFormat::YUV420P) &&
176 fmt <= static_cast<int32_t>(VideoPixelFormat::RGBA),
177 AVCS_ERR_INVALID_VAL, "Cannot get pixel format");
178 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
179 if (surfaceInfo.surfaceFence != nullptr) {
180 surfaceInfo.surfaceFence->Wait(100); // 100ms
181 }
182 uint32_t yScaleLineSize = static_cast<uint32_t>(surfaceInfo.scaleLineSize[0]);
183 if (IsYuvFormat(pixFmt)) {
184 if (surfaceInfo.surfaceStride % yScaleLineSize) {
185 return WriteYuvDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
186 surfaceInfo.surfaceStride, format);
187 }
188 WriteYuvData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height, pixFmt);
189 } else if (IsRgbFormat(pixFmt)) {
190 if (surfaceInfo.surfaceStride % yScaleLineSize) {
191 return WriteRgbDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
192 surfaceInfo.surfaceStride, format);
193 }
194 WriteRgbData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height);
195 } else {
196 AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
197 return AVCS_ERR_UNSUPPORT;
198 }
199 return AVCS_ERR_OK;
200 }
201
WriteBufferData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,int32_t * scaleLineSize,const Format & format)202 int32_t WriteBufferData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, int32_t *scaleLineSize,
203 const Format &format)
204 {
205 int32_t height;
206 int32_t width;
207 int32_t fmt;
208 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height) && height > 0,
209 AVCS_ERR_INVALID_VAL, "Invalid height %{public}d!", height);
210 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width) && width > 0,
211 AVCS_ERR_INVALID_VAL, "Invalid width %{public}d!", width);
212 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt) &&
213 fmt >= static_cast<int32_t>(VideoPixelFormat::YUV420P) &&
214 fmt <= static_cast<int32_t>(VideoPixelFormat::RGBA),
215 AVCS_ERR_INVALID_VAL, "Cannot get pixel format");
216 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
217
218 if (IsYuvFormat(pixFmt)) {
219 if (scaleLineSize[0] % width) {
220 return WriteYuvDataStride(memory, scaleData, scaleLineSize, width, format);
221 }
222 WriteYuvData(memory, scaleData, scaleLineSize, height, pixFmt);
223 } else if (IsRgbFormat(pixFmt)) {
224 if (scaleLineSize[0] % width) {
225 return WriteRgbDataStride(memory, scaleData, scaleLineSize, width * VIDEO_PIX_DEPTH_RGBA, format);
226 }
227 WriteRgbData(memory, scaleData, scaleLineSize, height);
228 } else {
229 AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
230 return AVCS_ERR_UNSUPPORT;
231 }
232 return AVCS_ERR_OK;
233 }
234
AVStrError(int errnum)235 std::string AVStrError(int errnum)
236 {
237 char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
238 av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
239 return std::string(errbuf);
240 }
241
TranslateSurfaceRotation(const VideoRotation & rotation)242 GraphicTransformType TranslateSurfaceRotation(const VideoRotation &rotation)
243 {
244 switch (rotation) {
245 case VideoRotation::VIDEO_ROTATION_90: {
246 return GRAPHIC_ROTATE_270;
247 }
248 case VideoRotation::VIDEO_ROTATION_180: {
249 return GRAPHIC_ROTATE_180;
250 }
251 case VideoRotation::VIDEO_ROTATION_270: {
252 return GRAPHIC_ROTATE_90;
253 }
254 default:
255 return GRAPHIC_ROTATE_NONE;
256 }
257 }
258
TranslateSurfaceFormat(const VideoPixelFormat & surfaceFormat)259 GraphicPixelFormat TranslateSurfaceFormat(const VideoPixelFormat &surfaceFormat)
260 {
261 switch (surfaceFormat) {
262 case VideoPixelFormat::YUVI420: {
263 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P;
264 }
265 case VideoPixelFormat::RGBA: {
266 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888;
267 }
268 case VideoPixelFormat::NV12: {
269 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
270 }
271 case VideoPixelFormat::NV21: {
272 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP;
273 }
274 default:
275 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BUTT;
276 }
277 }
278
ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)279 VideoPixelFormat ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)
280 {
281 auto iter = std::find_if(
282 g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
283 [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.second == ffmpegPixelFormat; });
284 return iter == g_pixelFormatMap.end() ? VideoPixelFormat::UNKNOWN : iter->first;
285 }
286
ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)287 AVPixelFormat ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)
288 {
289 auto iter = std::find_if(
290 g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
291 [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.first == pixelFormat; });
292 return iter == g_pixelFormatMap.end() ? AV_PIX_FMT_NONE : iter->second;
293 }
294
IsYuvFormat(VideoPixelFormat & format)295 bool IsYuvFormat(VideoPixelFormat &format)
296 {
297 return (format == VideoPixelFormat::YUVI420 || format == VideoPixelFormat::NV12 ||
298 format == VideoPixelFormat::NV21);
299 }
300
IsRgbFormat(VideoPixelFormat & format)301 bool IsRgbFormat(VideoPixelFormat &format)
302 {
303 return (format == VideoPixelFormat::RGBA);
304 }
305
Init(const ScalePara & scalePara,uint8_t ** dstData,int32_t * dstLineSize)306 int32_t Scale::Init(const ScalePara &scalePara, uint8_t **dstData, int32_t *dstLineSize)
307 {
308 scalePara_ = scalePara;
309 if (swsCtx_ != nullptr) {
310 return AVCS_ERR_OK;
311 }
312 auto swsContext =
313 sws_getContext(scalePara_.srcWidth, scalePara_.srcHeight, scalePara_.srcFfFmt, scalePara_.dstWidth,
314 scalePara_.dstHeight, scalePara_.dstFfFmt, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
315 if (swsContext == nullptr) {
316 return AVCS_ERR_UNKNOWN;
317 }
318 swsCtx_ = std::shared_ptr<SwsContext>(swsContext, [](struct SwsContext *ptr) {
319 if (ptr != nullptr) {
320 sws_freeContext(ptr);
321 }
322 });
323 auto ret = av_image_alloc(dstData, dstLineSize, scalePara_.dstWidth, scalePara_.dstHeight, scalePara_.dstFfFmt,
324 scalePara_.align);
325 if (ret < 0) {
326 return AVCS_ERR_UNKNOWN;
327 }
328 for (int32_t i = 0; dstLineSize[i] > 0; i++) {
329 if (dstData[i] && !dstLineSize[i]) {
330 return AVCS_ERR_UNKNOWN;
331 }
332 }
333 return AVCS_ERR_OK;
334 }
335
Convert(uint8_t ** srcData,const int32_t * srcLineSize,uint8_t ** dstData,int32_t * dstLineSize)336 int32_t Scale::Convert(uint8_t **srcData, const int32_t *srcLineSize, uint8_t **dstData, int32_t *dstLineSize)
337 {
338 auto res = sws_scale(swsCtx_.get(), srcData, srcLineSize, 0, scalePara_.srcHeight, dstData, dstLineSize);
339 if (res < 0) {
340 return AVCS_ERR_UNKNOWN;
341 }
342 return AVCS_ERR_OK;
343 }
344 } // namespace Codec
345 } // namespace MediaAVCodec
346 } // namespace OHOS