/*
 * Copyright (C) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codec_utils.h"
#include "avcodec_log.h"
#include "media_description.h"

namespace OHOS {
namespace MediaAVCodec {
namespace Codec {
namespace {
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN, "FCodec"};
constexpr uint32_t INDEX_ARRAY = 2;
std::map<VideoPixelFormat, AVPixelFormat> g_pixelFormatMap = {
    {VideoPixelFormat::YUV420P, AV_PIX_FMT_YUV420P},
    {VideoPixelFormat::NV12, AV_PIX_FMT_NV12},
    {VideoPixelFormat::NV21, AV_PIX_FMT_NV21},
    {VideoPixelFormat::RGBA, AV_PIX_FMT_RGBA},
};
} // namespace

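// Convert a decoded AVFrame into dstPixFmt. The Scale helper is created and initialized lazily
// on the first frame; width and height stay unchanged, only the pixel format is converted.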
int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale, std::shared_ptr<AVFrame> frame, uint8_t **dstData,
                          int32_t *dstLineSize, AVPixelFormat dstPixFmt)
{
    if (*scale == nullptr) {
        *scale = std::make_shared<Scale>();
        ScalePara scalePara {static_cast<int32_t>(frame->width),
                             static_cast<int32_t>(frame->height),
                             static_cast<AVPixelFormat>(frame->format),
                             static_cast<int32_t>(frame->width),
                             static_cast<int32_t>(frame->height),
                             dstPixFmt};
        CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
                                 "Scale init error");
    }
    return (*scale)->Convert(frame->data, frame->linesize, dstData, dstLineSize);
}

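// Copy YUV planes from scaleData into surface memory line by line, padding each row out to the
// surface buffer stride. Used when the surface stride is not a multiple of the frame width.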
int32_t WriteYuvDataStride(const std::shared_ptr<SurfaceMemory> &surfaceMemory, uint8_t **scaleData,
                           int32_t *scaleLineSize, int32_t stride, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);

    int32_t srcPos = 0;
    int32_t dstPos = 0;
    if (pixFmt == VideoPixelFormat::YUV420P) {
        auto writeSize = scaleLineSize[0];
        for (int32_t colNum = 0; colNum < height; colNum++) {
            surfaceMemory->Write(scaleData[0] + srcPos, writeSize, dstPos);
            dstPos += stride;
            srcPos += writeSize;
        }
        srcPos = 0;
        writeSize = scaleLineSize[1];
        for (int32_t colNum = 0; colNum < (height >> 1); colNum++) {
            surfaceMemory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += (stride >> 1);
            srcPos += writeSize;
        }
        srcPos = 0;
        writeSize = scaleLineSize[INDEX_ARRAY];
        for (int32_t colNum = 0; colNum < (height >> 1); colNum++) {
            surfaceMemory->Write(scaleData[INDEX_ARRAY] + srcPos, writeSize, dstPos);
            dstPos += (stride >> 1);
            srcPos += writeSize;
        }
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        auto writeSize = scaleLineSize[0];
        for (int32_t colNum = 0; colNum < height; colNum++) {
            surfaceMemory->Write(scaleData[0] + srcPos, writeSize, dstPos);
            dstPos += stride;
            srcPos += writeSize;
        }
        srcPos = 0;
        writeSize = scaleLineSize[1];
        for (int32_t colNum = 0; colNum < (height >> 1); colNum++) {
            surfaceMemory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += stride;
            srcPos += writeSize;
        }
    } else {
        return AVCS_ERR_UNSUPPORT;
    }
    AVCODEC_LOGD("WriteYuvDataStride success");
    return AVCS_ERR_OK;
}

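// Copy the packed RGBA plane into surface memory row by row, advancing the destination position
// by the surface buffer stride after each row.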
int32_t WriteRgbDataStride(const std::shared_ptr<SurfaceMemory> &surfaceMemory, uint8_t **scaleData,
                           int32_t *scaleLineSize, int32_t stride, const Format &format)
{
    int32_t height;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    int32_t srcPos = 0;
    int32_t dstPos = 0;
    int32_t writeSize = scaleLineSize[0];
    for (int32_t colNum = 0; colNum < height; colNum++) {
        surfaceMemory->Write(scaleData[0] + srcPos, writeSize, dstPos);
        dstPos += stride;
        srcPos += writeSize;
    }

    AVCODEC_LOGD("WriteRgbDataStride success");
    return AVCS_ERR_OK;
}

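// Write the luma and chroma planes contiguously into the output memory: three planes for
// YUV420P, two for NV12/NV21. Returns AVCS_ERR_NO_MEMORY if the buffer is too small.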
template <typename T>
int32_t WriteYuvData(const T &memory, uint8_t **scaleData, int32_t *scaleLineSize, int32_t &height,
                     VideoPixelFormat &pixFmt)
{
    int32_t ySize = static_cast<int32_t>(scaleLineSize[0] * height);      // luma plane size
    int32_t uvSize = static_cast<int32_t>(scaleLineSize[1] * height / 2); // 2: chroma plane has half the height
    int32_t frameSize = 0;
    if (pixFmt == VideoPixelFormat::YUV420P) {
        frameSize = ySize + (uvSize * 2); // 2: separate U and V planes
    } else if (pixFmt == VideoPixelFormat::NV21 || pixFmt == VideoPixelFormat::NV12) {
        frameSize = ySize + uvSize;
    }
    CHECK_AND_RETURN_RET_LOG(memory->GetSize() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}d]", memory->GetSize(),
                             frameSize);
    if (pixFmt == VideoPixelFormat::YUV420P) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
        memory->Write(scaleData[2], uvSize); // 2: index of the V plane
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
    } else {
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

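// Write the packed RGBA frame into the output memory in a single copy after checking its capacity.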
template <typename T>
int32_t WriteRgbData(const T &memory, uint8_t **scaleData, int32_t *scaleLineSize, int32_t &height)
{
    int32_t frameSize = static_cast<int32_t>(scaleLineSize[0] * height);
    CHECK_AND_RETURN_RET_LOG(memory->GetSize() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}d]", memory->GetSize(),
                             frameSize);
    memory->Write(scaleData[0], frameSize);
    return AVCS_ERR_OK;
}

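// Write a converted frame into surface memory. Waits on the buffer's fence first, then chooses
// the tightly packed path or the stride-aware path depending on whether the surface stride is a
// multiple of the frame width.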
int32_t WriteSurfaceData(const std::shared_ptr<SurfaceMemory> &surfaceMemory, uint8_t **scaleData,
                         int32_t *scaleLineSize, const Format &format)
{
    int32_t width;
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
    uint32_t stride = surfaceMemory->GetSurfaceBufferStride();

    surfaceMemory->ClearUsedSize();
    sptr<SyncFence> autoFence = new (std::nothrow) SyncFence(surfaceMemory->GetFence());
    if (autoFence != nullptr) {
        autoFence->Wait(100); // 100ms
    }

    if (IsYuvFormat(pixFmt)) {
        if (stride % width) {
            return WriteYuvDataStride(surfaceMemory, scaleData, scaleLineSize, stride, format);
        }
        WriteYuvData(surfaceMemory, scaleData, scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        if (stride % width) {
            return WriteRgbDataStride(surfaceMemory, scaleData, scaleLineSize, stride, format);
        }
        WriteRgbData(surfaceMemory, scaleData, scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

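// Write a converted frame into a shared-memory output buffer using the tightly packed YUV or RGB path.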
int32_t WriteBufferData(const std::shared_ptr<AVSharedMemoryBase> &bufferMemory, uint8_t **scaleData,
                        int32_t *scaleLineSize, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);

    bufferMemory->ClearUsedSize();
    if (IsYuvFormat(pixFmt)) {
        WriteYuvData(bufferMemory, scaleData, scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        WriteRgbData(bufferMemory, scaleData, scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed : unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

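// Translate an FFmpeg error code into a readable string.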
std::string AVStrError(int errnum)
{
    char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
    return std::string(errbuf);
}

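// Map the stream's rotation metadata to a surface transform; note that 90 and 270 degrees map to
// each other, while 180 degrees maps to itself.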
GraphicTransformType TranslateSurfaceRotation(const VideoRotation &rotation)
{
    switch (rotation) {
        case VideoRotation::VIDEO_ROTATION_90: {
            return GRAPHIC_ROTATE_270;
        }
        case VideoRotation::VIDEO_ROTATION_180: {
            return GRAPHIC_ROTATE_180;
        }
        case VideoRotation::VIDEO_ROTATION_270: {
            return GRAPHIC_ROTATE_90;
        }
        default:
            return GRAPHIC_ROTATE_NONE;
    }
}

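// Map a VideoPixelFormat to the corresponding graphic (surface) pixel format; unsupported
// formats map to GRAPHIC_PIXEL_FMT_BUTT.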
GraphicPixelFormat TranslateSurfaceFormat(const VideoPixelFormat &surfaceFormat)
{
    switch (surfaceFormat) {
        case VideoPixelFormat::YUV420P: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P;
        }
        case VideoPixelFormat::RGBA: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888;
        }
        case VideoPixelFormat::NV12: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
        }
        case VideoPixelFormat::NV21: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP;
        }
        default:
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BUTT;
    }
}

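// Look up the VideoPixelFormat that corresponds to an FFmpeg pixel format, or UNKNOWN_FORMAT if
// the format is not in g_pixelFormatMap.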
VideoPixelFormat ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.second == ffmpegPixelFormat; });
    return iter == g_pixelFormatMap.end() ? VideoPixelFormat::UNKNOWN_FORMAT : iter->first;
}

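// Look up the FFmpeg pixel format for a VideoPixelFormat, or AV_PIX_FMT_NONE if unsupported.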
AVPixelFormat ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.first == pixelFormat; });
    return iter == g_pixelFormatMap.end() ? AV_PIX_FMT_NONE : iter->second;
}

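// True for the YUV formats handled here: YUV420P, NV12 and NV21.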
bool IsYuvFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::YUV420P || format == VideoPixelFormat::NV12 ||
            format == VideoPixelFormat::NV21);
}

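// True for the only RGB format handled here: RGBA.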
bool IsRgbFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::RGBA);
}

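// Create the libswscale context for the given source/destination parameters and allocate the
// destination image buffers. Re-initialization is skipped if a context already exists.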
int32_t Scale::Init(const ScalePara &scalePara, uint8_t **dstData, int32_t *dstLineSize)
{
    scalePara_ = scalePara;
    if (swsCtx_ != nullptr) {
        return AVCS_ERR_OK;
    }
    auto swsContext =
        sws_getContext(scalePara_.srcWidth, scalePara_.srcHeight, scalePara_.srcFfFmt, scalePara_.dstWidth,
                       scalePara_.dstHeight, scalePara_.dstFfFmt, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
    if (swsContext == nullptr) {
        return AVCS_ERR_UNKNOWN;
    }
    swsCtx_ = std::shared_ptr<SwsContext>(swsContext, [](struct SwsContext *ptr) {
        if (ptr != nullptr) {
            sws_freeContext(ptr);
        }
    });
    auto ret = av_image_alloc(dstData, dstLineSize, scalePara_.dstWidth, scalePara_.dstHeight, scalePara_.dstFfFmt,
                              scalePara_.align);
    if (ret < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    for (int32_t i = 0; dstLineSize[i] > 0; i++) {
        if (dstData[i] && !dstLineSize[i]) {
            return AVCS_ERR_UNKNOWN;
        }
    }
    return AVCS_ERR_OK;
}

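// Run the sws_scale conversion from the source planes into the destination buffers.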
int32_t Scale::Convert(uint8_t **srcData, const int32_t *srcLineSize, uint8_t **dstData, int32_t *dstLineSize)
{
    auto res = sws_scale(swsCtx_.get(), srcData, srcLineSize, 0, scalePara_.srcHeight, dstData, dstLineSize);
    if (res < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    return AVCS_ERR_OK;
}
} // namespace Codec
} // namespace MediaAVCodec
} // namespace OHOS