• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "color_format_process.h"
17 
18 #include "distributed_hardware_log.h"
19 
20 #include "distributed_camera_errno.h"
21 
22 namespace OHOS {
23 namespace DistributedHardware {
~ColorFormatProcess()24 ColorFormatProcess::~ColorFormatProcess()
25 {
26     if (isColorFormatProcess_.load()) {
27         DHLOGD("~ColorFormatProcess : ReleaseProcessNode.");
28         ReleaseProcessNode();
29     }
30 }
31 
InitNode(const VideoConfigParams & sourceConfig,const VideoConfigParams & targetConfig,VideoConfigParams & processedConfig)32 int32_t ColorFormatProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
33     VideoConfigParams& processedConfig)
34 {
35     DHLOGD("ColorFormatProcess : InitNode.");
36     if (!IsConvertible(sourceConfig, targetConfig)) {
37         DHLOGE("sourceConfig: Videoformat %d Width %d, Height %d, targetConfig: Videoformat %d Width %d, Height %d.",
38             sourceConfig.GetVideoformat(), sourceConfig.GetWidth(), sourceConfig.GetHeight(),
39             targetConfig.GetVideoformat(), targetConfig.GetWidth(), targetConfig.GetHeight());
40         return DCAMERA_BAD_TYPE;
41     }
42 
43     sourceConfig_ = sourceConfig;
44     targetConfig_ = targetConfig;
45     processedConfig_ = sourceConfig;
46 
47     if (sourceConfig_.GetVideoformat() != targetConfig_.GetVideoformat()) {
48         processedConfig_.SetVideoformat(targetConfig_.GetVideoformat());
49     }
50 
51     processedConfig = processedConfig_;
52     isColorFormatProcess_.store(true);
53     return DCAMERA_OK;
54 }
55 
IsConvertible(const VideoConfigParams & sourceConfig,const VideoConfigParams & targetConfig)56 bool ColorFormatProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
57 {
58     return ((sourceConfig.GetVideoformat() == targetConfig.GetVideoformat() ||
59         (sourceConfig.GetVideoformat() == Videoformat::NV12 && targetConfig.GetVideoformat() == Videoformat::NV21)) &&
60         sourceConfig.GetWidth() == targetConfig.GetWidth() && sourceConfig.GetHeight() == targetConfig.GetHeight());
61 }
62 
ReleaseProcessNode()63 void ColorFormatProcess::ReleaseProcessNode()
64 {
65     DHLOGD("Start release [%d] node : ColorFormatNode.", nodeRank_);
66     isColorFormatProcess_.store(false);
67 
68     if (nextDataProcess_ != nullptr) {
69         nextDataProcess_->ReleaseProcessNode();
70         nextDataProcess_ = nullptr;
71     }
72     DHLOGD("Release [%d] node : ColorFormatNode end.", nodeRank_);
73 }
74 
ProcessData(std::vector<std::shared_ptr<DataBuffer>> & inputBuffers)75 int32_t ColorFormatProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
76 {
77     DHLOGD("Process data in ColorFormatProcess.");
78     if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
79         DHLOGE("The input data buffers is empty.");
80         return DCAMERA_BAD_VALUE;
81     }
82 
83     if (sourceConfig_.GetVideoformat() == processedConfig_.GetVideoformat()) {
84         DHLOGD("The target Video Format : %d is the same as the source Video Format : %d.",
85             sourceConfig_.GetVideoformat(), processedConfig_.GetVideoformat());
86         return ColorFormatDone(inputBuffers);
87     }
88 
89     int64_t timeStamp = 0;
90     if (!(inputBuffers[0]->FindInt64("timeUs", timeStamp))) {
91         DHLOGE("ColorConvertProcess : Find inputBuffer timeStamp failed.");
92         return DCAMERA_BAD_VALUE;
93     }
94 
95     ImageUnitInfo srcImgInfo {Videoformat::YUVI420, 0, 0, 0, 0, 0, 0, nullptr};
96     if (GetImageUnitInfo(srcImgInfo, inputBuffers[0]) != DCAMERA_OK || !CheckColorProcessInputInfo(srcImgInfo)) {
97         DHLOGE("ColorConvertProcess : srcImgInfo error.");
98         return DCAMERA_BAD_VALUE;
99     }
100 
101     size_t dstBufsize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
102         YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
103     std::shared_ptr<DataBuffer> dstBuf = std::make_shared<DataBuffer>(dstBufsize);
104     ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(),
105         processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(),
106         processedConfig_.GetWidth() * processedConfig_.GetHeight(), dstBuf->Size(), dstBuf->Data() };
107     if (ColorConvertByColorFormat(srcImgInfo, dstImgInfo) != DCAMERA_OK) {
108         DHLOGE("ColorConvertProcess : ColorConvertByColorFormat failed.");
109         return DCAMERA_BAD_OPERATE;
110     }
111 
112     dstBuf->SetInt64("timeUs", timeStamp);
113     dstBuf->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
114     dstBuf->SetInt32("alignedWidth", processedConfig_.GetWidth());
115     dstBuf->SetInt32("alignedHeight", processedConfig_.GetHeight());
116     dstBuf->SetInt32("width", processedConfig_.GetWidth());
117     dstBuf->SetInt32("height", processedConfig_.GetHeight());
118 
119     std::vector<std::shared_ptr<DataBuffer>> outputBuffers;
120     outputBuffers.push_back(dstBuf);
121     return ColorFormatDone(outputBuffers);
122 }
123 
GetImageUnitInfo(ImageUnitInfo & imgInfo,const std::shared_ptr<DataBuffer> & imgBuf)124 int32_t ColorFormatProcess::GetImageUnitInfo(ImageUnitInfo& imgInfo, const std::shared_ptr<DataBuffer>& imgBuf)
125 {
126     if (imgBuf == nullptr) {
127         DHLOGE("GetImageUnitInfo failed, imgBuf is nullptr.");
128         return DCAMERA_BAD_VALUE;
129     }
130 
131     bool findErr = true;
132     int32_t colorFormat = 0;
133     findErr = findErr && imgBuf->FindInt32("Videoformat", colorFormat);
134     if (!findErr) {
135         DHLOGE("GetImageUnitInfo failed, Videoformat is null.");
136         return DCAMERA_NOT_FOUND;
137     }
138     if (colorFormat != static_cast<int32_t>(Videoformat::YUVI420) &&
139         colorFormat != static_cast<int32_t>(Videoformat::NV12) &&
140         colorFormat != static_cast<int32_t>(Videoformat::NV21)) {
141         DHLOGE("GetImageUnitInfo failed, colorFormat %d are not supported.", colorFormat);
142         return DCAMERA_NOT_FOUND;
143     }
144     imgInfo.colorFormat = static_cast<Videoformat>(colorFormat);
145     findErr = findErr && imgBuf->FindInt32("width", imgInfo.width);
146     findErr = findErr && imgBuf->FindInt32("height", imgInfo.height);
147     findErr = findErr && imgBuf->FindInt32("alignedWidth", imgInfo.alignedWidth);
148     findErr = findErr && imgBuf->FindInt32("alignedHeight", imgInfo.alignedHeight);
149     if (!findErr) {
150         DHLOGE("GetImageUnitInfo failed, width %d, height %d, alignedWidth %d, alignedHeight %d.",
151             imgInfo.width, imgInfo.height, imgInfo.alignedWidth, imgInfo.alignedHeight);
152         return DCAMERA_NOT_FOUND;
153     }
154 
155     imgInfo.chromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
156     imgInfo.imgSize = imgBuf->Size();
157     imgInfo.imgData = imgBuf->Data();
158     if (imgInfo.imgData == nullptr) {
159         DHLOGE("Get the imgData of the imgBuf failed.");
160         return DCAMERA_BAD_VALUE;
161     }
162     DHLOGD("ColorFormatProcess imgBuf info : Videoformat %d, alignedWidth %d, alignedHeight %d, width %d, height %d," +
163         " chromaOffset %d, imgSize %d.", imgInfo.colorFormat, imgInfo.width, imgInfo.height, imgInfo.alignedWidth,
164         imgInfo.alignedHeight, imgInfo.chromaOffset, imgInfo.imgSize);
165     return DCAMERA_OK;
166 }
167 
CheckColorProcessInputInfo(const ImageUnitInfo & srcImgInfo)168 bool ColorFormatProcess::CheckColorProcessInputInfo(const ImageUnitInfo& srcImgInfo)
169 {
170     return srcImgInfo.colorFormat == sourceConfig_.GetVideoformat() &&
171         srcImgInfo.alignedWidth == sourceConfig_.GetWidth() &&
172         srcImgInfo.alignedHeight == sourceConfig_.GetHeight() &&
173         IsCorrectImageUnitInfo(srcImgInfo);
174 }
175 
CheckColorConvertInfo(const ImageUnitInfo & srcImgInfo,const ImageUnitInfo & dstImgInfo)176 bool ColorFormatProcess::CheckColorConvertInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
177 {
178     if (srcImgInfo.imgData == nullptr || dstImgInfo.imgData == nullptr) {
179         DHLOGE("The imgData of srcImgInfo or the imgData of dstImgInfo are null!");
180         return false;
181     }
182     if (srcImgInfo.colorFormat != Videoformat::NV12 && dstImgInfo.colorFormat != Videoformat::NV21) {
183         DHLOGE("CopyInfo error : srcImgInfo colorFormat %d, dstImgInfo colorFormat %d.",
184             srcImgInfo.colorFormat, dstImgInfo.colorFormat);
185         return false;
186     }
187 
188     if (!IsCorrectImageUnitInfo(srcImgInfo)) {
189         DHLOGE("srcImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " +
190             "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight,
191             srcImgInfo.chromaOffset, srcImgInfo.imgSize);
192         return false;
193     }
194     if (!IsCorrectImageUnitInfo(dstImgInfo)) {
195         DHLOGE("dstImginfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, " +
196             "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, dstImgInfo.alignedHeight,
197             dstImgInfo.chromaOffset, dstImgInfo.imgSize);
198         return false;
199     }
200 
201     if (dstImgInfo.width > srcImgInfo.alignedWidth || dstImgInfo.height > srcImgInfo.alignedHeight) {
202         DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.",
203             dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight);
204         return false;
205     }
206     return true;
207 }
208 
IsCorrectImageUnitInfo(const ImageUnitInfo & imgInfo)209 bool ColorFormatProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo)
210 {
211     size_t expectedImgSize = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight *
212                                                  YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
213     size_t expectedChromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
214     return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight &&
215         imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset);
216 }
217 
/**
 * @brief Separate a row of srcUVPlane into half a row of dstUPlane and half a row of dstVPlane. For example,
 * converts the UVPlane memory arrangement of NV12 to the UV memory arrangement of YUVI420. Note that the
 * stride and width of the dstImage must be the same.
 *
 * Processes two U/V byte pairs per iteration (4 source bytes -> 2 U bytes +
 * 2 V bytes); a trailing odd element is handled after the loop. srcHalfWidth
 * is the number of U/V pairs in the row (i.e. half the luma width).
 */
void ColorFormatProcess::SeparateUVPlaneByRow(const uint8_t *srcUVPlane, uint8_t *dstUPlane, uint8_t *dstVPlane,
    int32_t srcHalfWidth)
{
    // Byte offsets within one 4-byte group of interleaved UVUV data.
    int32_t memoryOffset0 = 0;
    int32_t memoryOffset1 = 1;
    int32_t memoryOffset2 = 2;
    int32_t memoryOffset3 = 3;
    int32_t perSeparatebytes = 4;
    // Two output pixels per step; `srcHalfWidth - 1` leaves any odd tail
    // element for the fix-up below.
    for (int32_t x = 0; x < srcHalfWidth - 1; x += memoryOffset2) {
        dstUPlane[x] = srcUVPlane[memoryOffset0];
        dstUPlane[x + memoryOffset1] = srcUVPlane[memoryOffset2];
        dstVPlane[x] = srcUVPlane[memoryOffset1];
        dstVPlane[x + memoryOffset1] = srcUVPlane[memoryOffset3];
        srcUVPlane += perSeparatebytes;  // advance past the 4 bytes consumed
    }
    // Odd width: one final UV pair remains (srcUVPlane already points at it).
    if (static_cast<uint32_t>(srcHalfWidth) & 1) {
        dstUPlane[srcHalfWidth - 1] = srcUVPlane[memoryOffset0];
        dstVPlane[srcHalfWidth - 1] = srcUVPlane[memoryOffset1];
    }
}
243 
/**
 * @brief De-interleave the NV12 UV plane of srcImgInfo into the planar U and
 * V planes of dstImgInfo (I420-style layout: U plane first, then V plane).
 *
 * @param srcImgInfo source frame; chroma starts at imgData + chromaOffset
 * @param dstImgInfo destination frame descriptor (same geometry as source)
 * @return DCAMERA_OK on success, DCAMERA_BAD_VALUE on validation failure
 */
int32_t ColorFormatProcess::SeparateNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
{
    if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) {
        DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed.");
        return DCAMERA_BAD_VALUE;
    }

    // Interleaved UV rows start right after the Y plane; stride is the full
    // aligned luma width (one U byte + one V byte per 2x2 luma block column).
    uint8_t *srcUVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset;
    int32_t srcUVStride = srcImgInfo.alignedWidth;
    uint8_t *dstUPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset;
    int32_t dstUStride = dstImgInfo.alignedWidth / Y2UV_RATIO;
    // V plane sits one quarter of a luma plane (chromaOffset / 4) after U.
    uint8_t *dstVPlane = dstUPlane + (dstImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO;
    int32_t dstVStride = dstImgInfo.alignedWidth / Y2UV_RATIO;
    int32_t width = srcImgInfo.width / Y2UV_RATIO;
    int32_t height = srcImgInfo.height / Y2UV_RATIO;
    DHLOGD("srcUVStride %d, dstUStride %d, dstVStride %d, src half width %d, src half height %d.",
        srcUVStride, dstUStride, dstVStride, width, height);

    /* Negative height means invert the image. */
    if (height < 0) {
        height = -height;
        // Start at the last output row and walk upwards via negated strides.
        dstUPlane = dstUPlane + (height - 1) * dstUStride;
        dstVPlane = dstVPlane + (height - 1) * dstVStride;
        dstUStride = -dstUStride;
        dstVStride = -dstVStride;
    }
    /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */
    if (srcUVStride == width * Y2UV_RATIO && dstUStride == width && dstVStride == width) {
        // Rows are contiguous, so the whole plane can be handled as one row.
        SeparateUVPlaneByRow(srcUVPlane, dstUPlane, dstVPlane, width * height);
        return DCAMERA_OK;
    }
    /* Black borders exist in srcImage or dstImage. */
    for (int32_t y = 0; y < height; ++y) {
        SeparateUVPlaneByRow(srcUVPlane, dstUPlane, dstVPlane, width);
        dstUPlane += dstUStride;
        dstVPlane += dstVStride;
        srcUVPlane += srcUVStride;
    }
    return DCAMERA_OK;
}
284 
/**
 * @brief Combine half a row of srcUPlane and half a row of srcVPlane into a row of dstUVPlane. For example,
 * converts the UVPlane memory arrangement of YUVI420 to the UV memory arrangement of NV12. Note that the
 * stride and width of the srcImage must be the same.
 *
 * Inverse of SeparateUVPlaneByRow: two U bytes + two V bytes -> 4 interleaved
 * output bytes per iteration, odd tail handled after the loop. dstHalfWidth
 * is the number of U/V pairs in the row.
 */
void ColorFormatProcess::CombineUVPlaneByRow(const uint8_t *srcUPlane, const uint8_t *srcVPlane, uint8_t *dstUVPlane,
    int32_t dstHalfWidth)
{
    // Byte offsets within one 4-byte group of interleaved output.
    int32_t memoryOffset0 = 0;
    int32_t memoryOffset1 = 1;
    int32_t memoryOffset2 = 2;
    int32_t memoryOffset3 = 3;
    int32_t perCombinebytes = 4;
    // Two pixel pairs per step; `dstHalfWidth - 1` leaves any odd tail
    // element for the fix-up below.
    for (int32_t x = 0; x < dstHalfWidth - 1; x += memoryOffset2) {
        dstUVPlane[memoryOffset0] = srcUPlane[x];
        dstUVPlane[memoryOffset1] = srcVPlane[x];
        dstUVPlane[memoryOffset2] = srcUPlane[x + memoryOffset1];
        dstUVPlane[memoryOffset3] = srcVPlane[x + memoryOffset1];
        dstUVPlane += perCombinebytes;  // advance past the 4 bytes written
    }
    // Odd width: one final U/V pair remains.
    if (static_cast<uint32_t>(dstHalfWidth) & 1) {
        dstUVPlane[memoryOffset0] = srcUPlane[dstHalfWidth - 1];
        dstUVPlane[memoryOffset1] = srcVPlane[dstHalfWidth - 1];
    }
}
310 
/**
 * @brief Interleave the planar chroma of srcImgInfo into the packed UV plane
 * of dstImgInfo.
 *
 * NOTE(review): the plane at chromaOffset is read as V and the following
 * plane as U — the opposite of SeparateNV12UVPlane's write order. In the
 * NV12->NV21 path this swap is exactly what turns UV order into VU order;
 * confirm intent before reusing this helper elsewhere.
 *
 * @return DCAMERA_OK on success, DCAMERA_BAD_VALUE on validation failure
 */
int32_t ColorFormatProcess::CombineNV12UVPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
{
    if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) {
        DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed.");
        return DCAMERA_BAD_VALUE;
    }

    // First chroma plane (after Y) read as V; second, a quarter luma plane
    // later, read as U — see NOTE above.
    uint8_t *srcVPlane = srcImgInfo.imgData + srcImgInfo.chromaOffset;
    int32_t srcVStride = srcImgInfo.alignedWidth / Y2UV_RATIO;
    uint8_t *srcUPlane = srcVPlane + (srcImgInfo.chromaOffset / Y2UV_RATIO) / Y2UV_RATIO;
    int32_t srcUStride = srcImgInfo.alignedWidth / Y2UV_RATIO;
    uint8_t *dstUVPlane = dstImgInfo.imgData + dstImgInfo.chromaOffset;
    int32_t dstUVStride = dstImgInfo.alignedWidth;
    int32_t width = dstImgInfo.width / Y2UV_RATIO;
    int32_t height = dstImgInfo.height / Y2UV_RATIO;
    DHLOGD("srcUStride %d, srcVStride %d, dstUVStride %d, dst half width %d, dst half height %d.",
        srcUStride, srcVStride, dstUVStride, width, height);

    /* Negative height means invert the image. */
    if (height < 0) {
        height = -height;
        // Start at the last output row and walk upwards via a negated stride.
        dstUVPlane = dstUVPlane + (height - 1) * dstUVStride;
        dstUVStride = -dstUVStride;
    }
    /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */
    if (srcUStride == width && srcVStride == width && dstUVStride == width * Y2UV_RATIO) {
        // Rows are contiguous, so the whole plane can be handled as one row.
        CombineUVPlaneByRow(srcUPlane, srcVPlane, dstUVPlane, width * height);
        return DCAMERA_OK;
    }
    /* Black borders exist in srcImage or dstImage. */
    for (int32_t y = 0; y < height; ++y) {
        CombineUVPlaneByRow(srcUPlane, srcVPlane, dstUVPlane, width);
        srcUPlane += srcUStride;
        srcVPlane += srcVStride;
        dstUVPlane += dstUVStride;
    }
    return DCAMERA_OK;
}
349 
CopyYPlane(const ImageUnitInfo & srcImgInfo,const ImageUnitInfo & dstImgInfo)350 int32_t ColorFormatProcess::CopyYPlane(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
351 {
352     if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) {
353         DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed.");
354         return DCAMERA_BAD_VALUE;
355     }
356 
357     int32_t totalCopyYPlaneSize = dstImgInfo.alignedWidth * dstImgInfo.height;
358     if (srcImgInfo.alignedWidth == dstImgInfo.width && dstImgInfo.alignedWidth == dstImgInfo.width) {
359         /* No black border of srcImage and dstImage, and the strides of srcImage and dstImage are equal. */
360         errno_t err = memcpy_s(dstImgInfo.imgData, totalCopyYPlaneSize, srcImgInfo.imgData, totalCopyYPlaneSize);
361         if (err != EOK) {
362             DHLOGE("ColorConvert : memcpy_s CopyYPlaner failed by Coalesce rows.");
363             return DCAMERA_MEMORY_OPT_ERROR;
364         }
365     } else {
366         /* Black borders exist in srcImage or dstImage. */
367         int32_t srcDataOffset = 0;
368         int32_t dstDataOffset = 0;
369         for (int32_t yh = 0; yh < dstImgInfo.height; yh++) {
370             errno_t err = memcpy_s(dstImgInfo.imgData + dstDataOffset, totalCopyYPlaneSize - dstDataOffset,
371                 srcImgInfo.imgData + srcDataOffset, dstImgInfo.width);
372             if (err != EOK) {
373                 DHLOGE("memcpy_s YPlane in line[%d] failed.", yh);
374                 return DCAMERA_MEMORY_OPT_ERROR;
375             }
376             dstDataOffset += dstImgInfo.alignedWidth;
377             srcDataOffset += srcImgInfo.alignedWidth;
378         }
379         DHLOGD("ColorConvert :get valid yplane OK, srcImgInfo: alignedWidth %d, width %d, height %d. " +
380             "dstImgInfo: alignedWidth %d, width %d, height %d. dstDataOffset %d, srcDataOffset %d.",
381             srcImgInfo.alignedWidth, srcImgInfo.width, srcImgInfo.height, dstImgInfo.alignedWidth,
382             dstImgInfo.width, dstImgInfo.height, dstDataOffset, srcDataOffset);
383     }
384     return DCAMERA_OK;
385 }
386 
ColorConvertNV12ToNV21(const ImageUnitInfo & srcImgInfo,const ImageUnitInfo & dstImgInfo)387 int32_t ColorFormatProcess::ColorConvertNV12ToNV21(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
388 {
389     if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) {
390         DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed.");
391         return DCAMERA_BAD_VALUE;
392     }
393 
394     int32_t err = CopyYPlane(srcImgInfo, dstImgInfo);
395     if (err != DCAMERA_OK) {
396         DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed.");
397         return err;
398     }
399 
400     std::shared_ptr<DataBuffer> tempPlaneYUV = std::make_shared<DataBuffer>(dstImgInfo.imgSize);
401     ImageUnitInfo tempImgInfo = dstImgInfo;
402     tempImgInfo.imgData = tempPlaneYUV->Data();
403     SeparateNV12UVPlane(srcImgInfo, tempImgInfo);
404     CombineNV12UVPlane(tempImgInfo, dstImgInfo);
405     return DCAMERA_OK;
406 }
407 
ColorConvertNV12ToI420(const ImageUnitInfo & srcImgInfo,const ImageUnitInfo & dstImgInfo)408 int32_t ColorFormatProcess::ColorConvertNV12ToI420(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
409 {
410     if (!CheckColorConvertInfo(srcImgInfo, dstImgInfo)) {
411         DHLOGE("ColorFormatProcess : CheckColorConvertInfo failed.");
412         return DCAMERA_BAD_VALUE;
413     }
414 
415     int32_t err = CopyYPlane(srcImgInfo, dstImgInfo);
416     if (err != DCAMERA_OK) {
417         DHLOGE("ColorConvertNV12ToNV21 : CopyYPlane failed.");
418         return err;
419     }
420 
421     SeparateNV12UVPlane(srcImgInfo, dstImgInfo);
422     return DCAMERA_OK;
423 }
424 
ColorConvertByColorFormat(const ImageUnitInfo & srcImgInfo,const ImageUnitInfo & dstImgInfo)425 int32_t ColorFormatProcess::ColorConvertByColorFormat(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
426 {
427     int32_t ret;
428     switch (srcImgInfo.colorFormat) {
429         case Videoformat::NV12:
430             switch (dstImgInfo.colorFormat) {
431                 case Videoformat::NV21:
432                     ret = ColorConvertNV12ToNV21(srcImgInfo, dstImgInfo);
433                     break;
434                 case Videoformat::YUVI420:
435                     ret = ColorConvertNV12ToI420(srcImgInfo, dstImgInfo);
436                     break;
437                 default:
438                     DHLOGE("Unsupport ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat);
439                     return DCAMERA_BAD_OPERATE;
440             }
441             break;
442         case Videoformat::NV21:
443         case Videoformat::YUVI420:
444         case Videoformat::RGBA_8888:
445             DHLOGE("Unsupport ColorConvert %d to %d.", srcImgInfo.colorFormat, dstImgInfo.colorFormat);
446             return DCAMERA_BAD_OPERATE;
447     }
448     return ret;
449 }
450 
ColorFormatDone(std::vector<std::shared_ptr<DataBuffer>> & outputBuffers)451 int32_t ColorFormatProcess::ColorFormatDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
452 {
453     DHLOGD("ColorFormat Done.");
454     if (outputBuffers.empty()) {
455         DHLOGE("The received data buffers is empty.");
456         return DCAMERA_BAD_VALUE;
457     }
458 
459     if (nextDataProcess_ != nullptr) {
460         DHLOGD("Send to the next node of the decoder for processing.");
461         int32_t err = nextDataProcess_->ProcessData(outputBuffers);
462         if (err != DCAMERA_OK) {
463             DHLOGE("Someone node after the decoder processes failed.");
464         }
465         return err;
466     }
467     DHLOGD("The current node is the last node, and Output the processed video buffer");
468     std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
469     if (targetPipelineSource == nullptr) {
470         DHLOGE("callbackPipelineSource_ is nullptr.");
471         return DCAMERA_BAD_VALUE;
472     }
473     targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
474     return DCAMERA_OK;
475 }
476 } // namespace DistributedHardware
477 } // namespace OHOS
478