1 /*
2 * Copyright (C) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "codec_utils.h"
17 #include "avcodec_log.h"
18 #include "avcodec_trace.h"
19 #include "media_description.h"
20 namespace OHOS {
21 namespace MediaAVCodec {
22 namespace Codec {
namespace {
// HiLog label used by the AVCODEC_LOG* macros in this translation unit.
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN_FRAMEWORK, "CODEC_UTILS"};
// Index of the third (V) plane in the scaleData/scaleLineSize arrays.
constexpr uint32_t INDEX_ARRAY = 2;
// Upper bound for waiting on a surface buffer fence, in milliseconds.
constexpr uint32_t WAIT_FENCE_MS = 1000;
// Translation table between the framework's VideoPixelFormat and FFmpeg's AVPixelFormat.
std::map<VideoPixelFormat, AVPixelFormat> g_pixelFormatMap = {
    {VideoPixelFormat::YUVI420, AV_PIX_FMT_YUV420P},
    {VideoPixelFormat::NV12, AV_PIX_FMT_NV12},
    {VideoPixelFormat::NV21, AV_PIX_FMT_NV21},
    {VideoPixelFormat::RGBA, AV_PIX_FMT_RGBA},
};
} // namespace
34
35 using namespace OHOS::Media;
ConvertVideoFrame(std::shared_ptr<Scale> * scale,std::shared_ptr<AVFrame> frame,uint8_t ** dstData,int32_t * dstLineSize,AVPixelFormat dstPixFmt)36 int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale, std::shared_ptr<AVFrame> frame, uint8_t **dstData,
37 int32_t *dstLineSize, AVPixelFormat dstPixFmt)
38 {
39 if (*scale == nullptr) {
40 *scale = std::make_shared<Scale>();
41 ScalePara scalePara{static_cast<int32_t>(frame->width), static_cast<int32_t>(frame->height),
42 static_cast<AVPixelFormat>(frame->format), static_cast<int32_t>(frame->width),
43 static_cast<int32_t>(frame->height), dstPixFmt};
44 CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
45 "Scale init error");
46 }
47 return (*scale)->Convert(frame->data, frame->linesize, dstData, dstLineSize);
48 }
49
ConvertVideoFrame(std::shared_ptr<Scale> * scale,uint8_t ** srcData,int32_t * srcLineSize,AVPixelFormat srcPixFmt,int32_t srcWidth,int32_t srcHeight,uint8_t ** dstData,int32_t * dstLineSize,AVPixelFormat dstPixFmt)50 int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale,
51 uint8_t **srcData, int32_t *srcLineSize, AVPixelFormat srcPixFmt,
52 int32_t srcWidth, int32_t srcHeight,
53 uint8_t **dstData, int32_t *dstLineSize, AVPixelFormat dstPixFmt)
54 {
55 if (*scale == nullptr) {
56 *scale = std::make_shared<Scale>();
57 ScalePara scalePara{srcWidth, srcHeight, srcPixFmt,
58 srcWidth, srcHeight, dstPixFmt};
59 CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
60 "Scale init error");
61 }
62 return (*scale)->Convert(srcData, srcLineSize, dstData, dstLineSize);
63 }
64
WriteYuvDataStride(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t stride,const Format & format)65 int32_t WriteYuvDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
66 int32_t stride, const Format &format)
67 {
68 int32_t height;
69 int32_t fmt;
70 format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
71 format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
72 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
73 CHECK_AND_RETURN_RET_LOG(pixFmt == VideoPixelFormat::YUVI420 || pixFmt == VideoPixelFormat::NV12 ||
74 pixFmt == VideoPixelFormat::NV21,
75 AVCS_ERR_UNSUPPORT, "pixFmt: %{public}d do not support", pixFmt);
76 int32_t srcPos = 0;
77 int32_t dstPos = 0;
78 int32_t dataSize = scaleLineSize[0];
79 int32_t writeSize = dataSize > stride ? stride : dataSize;
80 std::string traceTitle = "stride(" + std::to_string(stride) + ")_height(" + std::to_string(height) + ")";
81 AVCodecTrace trace("WriteYuvByStride_pixfmt(" + std::to_string(fmt) + ")_" + traceTitle);
82 for (int32_t colNum = 0; colNum < height; colNum++) {
83 memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
84 dstPos += stride;
85 srcPos += dataSize;
86 }
87 srcPos = 0;
88 stride = ((stride + UV_SCALE_FACTOR - 1) / UV_SCALE_FACTOR) * UV_SCALE_FACTOR;
89 height = ((height + UV_SCALE_FACTOR - 1) / UV_SCALE_FACTOR) * UV_SCALE_FACTOR;
90 if (pixFmt == VideoPixelFormat::YUVI420) {
91 dataSize = scaleLineSize[1];
92 writeSize = dataSize > (stride / UV_SCALE_FACTOR) ? (stride / UV_SCALE_FACTOR) : dataSize;
93 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
94 memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
95 dstPos += (stride / UV_SCALE_FACTOR);
96 srcPos += dataSize;
97 }
98 srcPos = 0;
99 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
100 memory->Write(scaleData[INDEX_ARRAY] + srcPos, writeSize, dstPos);
101 dstPos += (stride / UV_SCALE_FACTOR);
102 srcPos += dataSize;
103 }
104 } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
105 dataSize = scaleLineSize[1];
106 writeSize = dataSize > stride ? stride : dataSize;
107 for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
108 memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
109 dstPos += stride;
110 srcPos += dataSize;
111 }
112 }
113 AVCODEC_LOGD("WriteYuvDataStride success");
114 return AVCS_ERR_OK;
115 }
116
WriteRgbDataStride(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t stride,const Format & format)117 int32_t WriteRgbDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
118 int32_t stride, const Format &format)
119 {
120 int32_t height;
121 format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
122 int32_t srcPos = 0;
123 int32_t dstPos = 0;
124 int32_t dataSize = scaleLineSize[0];
125 int32_t writeSize = dataSize > stride ? stride : dataSize;
126 std::string traceTitle = "stride(" + std::to_string(stride) + ")_height(" + std::to_string(height) + ")";
127 AVCodecTrace trace("WriteRgbByStride_" + traceTitle);
128 for (int32_t colNum = 0; colNum < height; colNum++) {
129 memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
130 dstPos += stride;
131 srcPos += dataSize;
132 }
133 AVCODEC_LOGD("WriteRgbDataStride success");
134 return AVCS_ERR_OK;
135 }
136
WriteYuvData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t & height,VideoPixelFormat & pixFmt)137 int32_t WriteYuvData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
138 int32_t &height, VideoPixelFormat &pixFmt)
139 {
140 AVCODEC_SYNC_TRACE;
141 int32_t ySize = static_cast<int32_t>(scaleLineSize[0] * height); // yuv420: 411 nv21
142 int32_t uvSize = static_cast<int32_t>(scaleLineSize[1] * height / 2); // 2
143 int32_t frameSize = 0;
144 if (pixFmt == VideoPixelFormat::YUVI420) {
145 frameSize = ySize + (uvSize * 2); // 2
146 } else if (pixFmt == VideoPixelFormat::NV21 || pixFmt == VideoPixelFormat::NV12) {
147 frameSize = ySize + uvSize;
148 }
149 CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
150 "output buffer size is not enough: real[%{public}d], need[%{public}u]",
151 memory->GetCapacity(), frameSize);
152 if (pixFmt == VideoPixelFormat::YUVI420) {
153 memory->Write(scaleData[0], ySize);
154 memory->Write(scaleData[1], uvSize);
155 memory->Write(scaleData[2], uvSize); // 2
156 } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
157 memory->Write(scaleData[0], ySize);
158 memory->Write(scaleData[1], uvSize);
159 } else {
160 return AVCS_ERR_UNSUPPORT;
161 }
162 return AVCS_ERR_OK;
163 }
164
WriteRgbData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,const int32_t * scaleLineSize,int32_t & height)165 int32_t WriteRgbData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
166 int32_t &height)
167 {
168 AVCODEC_SYNC_TRACE;
169 int32_t frameSize = static_cast<int32_t>(scaleLineSize[0] * height);
170 CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
171 "output buffer size is not enough: real[%{public}d], need[%{public}u]",
172 memory->GetCapacity(), frameSize);
173 memory->Write(scaleData[0], frameSize);
174 return AVCS_ERR_OK;
175 }
176
WriteSurfaceData(const std::shared_ptr<AVMemory> & memory,struct SurfaceInfo & surfaceInfo,const Format & format)177 int32_t WriteSurfaceData(const std::shared_ptr<AVMemory> &memory, struct SurfaceInfo &surfaceInfo, const Format &format)
178 {
179 int32_t height;
180 int32_t fmt;
181 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height) && height > 0,
182 AVCS_ERR_INVALID_VAL, "Invalid height %{public}d!", height);
183 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt) &&
184 fmt >= static_cast<int32_t>(VideoPixelFormat::YUV420P) &&
185 fmt <= static_cast<int32_t>(VideoPixelFormat::RGBA),
186 AVCS_ERR_INVALID_VAL, "Cannot get pixel format");
187 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
188 AVCODEC_SYNC_TRACE;
189 if (surfaceInfo.surfaceFence != nullptr) {
190 int32_t waitRes = surfaceInfo.surfaceFence->Wait(WAIT_FENCE_MS);
191 EXPECT_AND_LOGD(waitRes != 0, "wait fence time out, cost more than %{public}u ms", WAIT_FENCE_MS);
192 }
193 uint32_t yScaleLineSize = static_cast<uint32_t>(surfaceInfo.scaleLineSize[0]);
194 if (IsYuvFormat(pixFmt)) {
195 if (surfaceInfo.surfaceStride % yScaleLineSize) {
196 return WriteYuvDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
197 surfaceInfo.surfaceStride, format);
198 }
199 return WriteYuvData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height, pixFmt);
200 } else if (IsRgbFormat(pixFmt)) {
201 if (surfaceInfo.surfaceStride % yScaleLineSize) {
202 return WriteRgbDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
203 surfaceInfo.surfaceStride, format);
204 }
205 return WriteRgbData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height);
206 } else {
207 AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
208 return AVCS_ERR_UNSUPPORT;
209 }
210 return AVCS_ERR_OK;
211 }
212
WriteBufferData(const std::shared_ptr<AVMemory> & memory,uint8_t ** scaleData,int32_t * scaleLineSize,const Format & format)213 int32_t WriteBufferData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, int32_t *scaleLineSize,
214 const Format &format)
215 {
216 int32_t height;
217 int32_t width;
218 int32_t fmt;
219 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height) && height > 0,
220 AVCS_ERR_INVALID_VAL, "Invalid height %{public}d!", height);
221 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width) && width > 0,
222 AVCS_ERR_INVALID_VAL, "Invalid width %{public}d!", width);
223 CHECK_AND_RETURN_RET_LOG(format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt) &&
224 fmt >= static_cast<int32_t>(VideoPixelFormat::YUV420P) &&
225 fmt <= static_cast<int32_t>(VideoPixelFormat::RGBA),
226 AVCS_ERR_INVALID_VAL, "Cannot get pixel format");
227 VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
228 AVCODEC_SYNC_TRACE;
229 if (IsYuvFormat(pixFmt)) {
230 if (scaleLineSize[0] % width) {
231 return WriteYuvDataStride(memory, scaleData, scaleLineSize, width, format);
232 }
233 return WriteYuvData(memory, scaleData, scaleLineSize, height, pixFmt);
234 } else if (IsRgbFormat(pixFmt)) {
235 if (scaleLineSize[0] % width) {
236 return WriteRgbDataStride(memory, scaleData, scaleLineSize, width * VIDEO_PIX_DEPTH_RGBA, format);
237 }
238 return WriteRgbData(memory, scaleData, scaleLineSize, height);
239 } else {
240 AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
241 return AVCS_ERR_UNSUPPORT;
242 }
243 return AVCS_ERR_OK;
244 }
245
AVStrError(int errnum)246 std::string AVStrError(int errnum)
247 {
248 char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
249 av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
250 return std::string(errbuf);
251 }
252
TranslateSurfaceRotation(const VideoRotation & rotation)253 GraphicTransformType TranslateSurfaceRotation(const VideoRotation &rotation)
254 {
255 switch (rotation) {
256 case VideoRotation::VIDEO_ROTATION_90: {
257 return GRAPHIC_ROTATE_270;
258 }
259 case VideoRotation::VIDEO_ROTATION_180: {
260 return GRAPHIC_ROTATE_180;
261 }
262 case VideoRotation::VIDEO_ROTATION_270: {
263 return GRAPHIC_ROTATE_90;
264 }
265 default:
266 return GRAPHIC_ROTATE_NONE;
267 }
268 }
269
TranslateSurfaceFormat(const VideoPixelFormat & surfaceFormat)270 GraphicPixelFormat TranslateSurfaceFormat(const VideoPixelFormat &surfaceFormat)
271 {
272 switch (surfaceFormat) {
273 case VideoPixelFormat::YUVI420: {
274 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P;
275 }
276 case VideoPixelFormat::RGBA: {
277 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888;
278 }
279 case VideoPixelFormat::NV12: {
280 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
281 }
282 case VideoPixelFormat::NV21: {
283 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP;
284 }
285 default:
286 return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BUTT;
287 }
288 }
289
ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)290 VideoPixelFormat ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)
291 {
292 auto iter = std::find_if(
293 g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
294 [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.second == ffmpegPixelFormat; });
295 return iter == g_pixelFormatMap.end() ? VideoPixelFormat::UNKNOWN : iter->first;
296 }
297
ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)298 AVPixelFormat ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)
299 {
300 auto iter = std::find_if(
301 g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
302 [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.first == pixelFormat; });
303 return iter == g_pixelFormatMap.end() ? AV_PIX_FMT_NONE : iter->second;
304 }
305
IsYuvFormat(VideoPixelFormat & format)306 bool IsYuvFormat(VideoPixelFormat &format)
307 {
308 return (format == VideoPixelFormat::YUVI420 || format == VideoPixelFormat::NV12 ||
309 format == VideoPixelFormat::NV21);
310 }
311
IsRgbFormat(VideoPixelFormat & format)312 bool IsRgbFormat(VideoPixelFormat &format)
313 {
314 return (format == VideoPixelFormat::RGBA);
315 }
316
Init(const ScalePara & scalePara,uint8_t ** dstData,int32_t * dstLineSize)317 int32_t Scale::Init(const ScalePara &scalePara, uint8_t **dstData, int32_t *dstLineSize)
318 {
319 scalePara_ = scalePara;
320 if (swsCtx_ != nullptr) {
321 return AVCS_ERR_OK;
322 }
323 auto swsContext =
324 sws_getContext(scalePara_.srcWidth, scalePara_.srcHeight, scalePara_.srcFfFmt, scalePara_.dstWidth,
325 scalePara_.dstHeight, scalePara_.dstFfFmt, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
326 if (swsContext == nullptr) {
327 return AVCS_ERR_UNKNOWN;
328 }
329 swsCtx_ = std::shared_ptr<SwsContext>(swsContext, [](struct SwsContext *ptr) {
330 if (ptr != nullptr) {
331 sws_freeContext(ptr);
332 }
333 });
334 auto ret = av_image_alloc(dstData, dstLineSize, scalePara_.dstWidth, scalePara_.dstHeight, scalePara_.dstFfFmt,
335 scalePara_.align);
336 if (ret < 0) {
337 return AVCS_ERR_UNKNOWN;
338 }
339 for (int32_t i = 0; dstLineSize[i] > 0; i++) {
340 if (dstData[i] && !dstLineSize[i]) {
341 return AVCS_ERR_UNKNOWN;
342 }
343 }
344 return AVCS_ERR_OK;
345 }
346
Convert(uint8_t ** srcData,const int32_t * srcLineSize,uint8_t ** dstData,int32_t * dstLineSize)347 int32_t Scale::Convert(uint8_t **srcData, const int32_t *srcLineSize, uint8_t **dstData, int32_t *dstLineSize)
348 {
349 auto res = sws_scale(swsCtx_.get(), srcData, srcLineSize, 0, scalePara_.srcHeight, dstData, dstLineSize);
350 if (res < 0) {
351 return AVCS_ERR_UNKNOWN;
352 }
353 return AVCS_ERR_OK;
354 }
355 } // namespace Codec
356 } // namespace MediaAVCodec
357 } // namespace OHOS