/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "libyuv/convert_argb.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
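// Destination state shared by the row callbacks below. MJpegDecoder decodes
// the frame in bands and invokes a callback per band; each callback converts
// the band's rows into the destination planes, then advances the plane
// pointers (chroma by (rows + 1) / 2 rows) and decrements the remaining
// height.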
struct I420Buffers {
  uint8_t* y;
  int y_stride;
  uint8_t* u;
  int u_stride;
  uint8_t* v;
  int v_stride;
  int w;
  int h;
};

static void JpegCopyI420(void* opaque,
                         const uint8_t* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
           dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
           dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
             dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8_t* src_mjpg,
             size_t src_size_mjpg,
             int* width,
             int* height) {
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}
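
// Illustrative usage sketch (not part of the library source): probe the frame
// dimensions with MJPGSize, size the I420 planes from them, then decode with
// MJPGToI420 below. The buffer names here are hypothetical.
//
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &width, &height) == 0) {
//     int half_width = (width + 1) / 2;
//     int half_height = (height + 1) / 2;
//     std::vector<uint8_t> y(width * height);
//     std::vector<uint8_t> u(half_width * half_height);
//     std::vector<uint8_t> v(half_width * half_height);
//     MJPGToI420(jpeg_data, jpeg_size, y.data(), width, u.data(), half_width,
//                v.data(), half_width, width, height, width, height);
//   }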

// MJPG (Motion JPeg) to I420
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToI420(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_u,
               int dst_stride_u,
               uint8_t* dst_v,
               int dst_stride_v,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = {dst_y, dst_stride_y, dst_u,     dst_stride_u,
                        dst_v, dst_stride_v, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other
      // colorspace/subsample factors that occur in practice. ERROR: Unable to
      // convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}

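// Destination state for the NV21 and NV12 paths. The same struct serves both:
// the NV12 callbacks further below pass the U and V source planes to the
// *ToNV21 converters in swapped order, so the interleaved chroma plane comes
// out as UV instead of VU.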
struct NV21Buffers {
  uint8_t* y;
  int y_stride;
  uint8_t* vu;
  int vu_stride;
  int w;
  int h;
};

static void JpegI420ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I420ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI422ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I422ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI444ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I444ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI400ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I400ToNV21(data[0], strides[0], dest->y, dest->y_stride, dest->vu,
             dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

// MJPG (Motion JPeg) to NV21
LIBYUV_API
int MJPGToNV21(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_vu,
               int dst_stride_vu,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    NV21Buffers bufs = {dst_y,         dst_stride_y, dst_vu,
                        dst_stride_vu, dst_width,    dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToNV21, &bufs, dst_width,
                                           dst_height);
    } else {
      // Unknown colorspace.
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}

static void JpegI420ToNV12(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  // Use NV21 with VU swapped.
  I420ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI422ToNV12(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  // Use NV21 with VU swapped.
  I422ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI444ToNV12(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  // Use NV21 with VU swapped.
  I444ToNV21(data[0], strides[0], data[2], strides[2], data[1], strides[1],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI400ToNV12(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  // Use NV21 since there is no UV plane.
  I400ToNV21(data[0], strides[0], dest->y, dest->y_stride, dest->vu,
             dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

// MJPG (Motion JPEG) to NV12.
LIBYUV_API
int MJPGToNV12(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_uv,
               int dst_stride_uv,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    // Use NV21Buffers but with UV instead of VU.
    NV21Buffers bufs = {dst_y,         dst_stride_y, dst_uv,
                        dst_stride_uv, dst_width,    dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToNV12, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToNV12, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToNV12, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToNV12, &bufs, dst_width,
                                           dst_height);
    } else {
      // Unknown colorspace.
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}

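// Destination state for the ARGB path: a single packed 4-bytes-per-pixel
// plane. Each callback converts a band of rows and advances the write pointer
// by rows * argb_stride.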
struct ARGBBuffers {
  uint8_t* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPeg) to ARGB
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToARGB(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_argb,
               int dst_stride_argb,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = {dst_argb, dst_stride_argb, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other
      // colorspace/subsample factors that occur in practice. ERROR: Unable to
      // convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
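
// Illustrative usage sketch (not part of the library source): decode a JPEG
// directly to a packed ARGB buffer, 4 bytes per pixel. Buffer names are
// hypothetical.
//
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &width, &height) == 0) {
//     std::vector<uint8_t> argb(static_cast<size_t>(width) * height * 4);
//     MJPGToARGB(jpeg_data, jpeg_size, argb.data(), width * 4, width, height,
//                width, height);
//   }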

#endif  // HAVE_JPEG

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif