/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "libyuv/convert_argb.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
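// Destination planes for the I420 row callbacks below. A pointer to this
// struct is passed through the decoder's opaque parameter; each callback
// writes a band of decoded rows and then advances the plane pointers.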
struct I420Buffers {
  uint8* y;
  int y_stride;
  uint8* u;
  int u_stride;
  uint8* v;
  int v_stride;
  int w;
  int h;
};

static void JpegCopyI420(void* opaque,
                         const uint8* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
           dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
           dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
             dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8* sample, size_t sample_size, int* width, int* height) {
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}
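// Illustrative usage of MJPGSize (jpeg_data and jpeg_size are hypothetical
// placeholders for a caller-supplied MJPEG buffer and its byte length):
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &width, &height) == 0) {
//     // width and height now describe the encoded frame.
//   }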

// MJPG (Motion JPeg) to I420
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToI420(const uint8* sample,
               size_t sample_size,
               uint8* y,
               int y_stride,
               uint8* u,
               int u_stride,
               uint8* v,
               int v_stride,
               int w,
               int h,
               int dw,
               int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret &&
      (mjpeg_decoder.GetWidth() != w || mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = {y, y_stride, u, u_stride, v, v_stride, dw, dh};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dw, dh);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dw, dh);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dw, dh);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
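// Illustrative call sequence for MJPGToI420 (buffer names are hypothetical;
// plane sizes follow the usual I420 layout with (w + 1) / 2 chroma strides,
// matching the callbacks above):
//   int w = 0, h = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &w, &h) == 0) {
//     // dst_y is w * h bytes; dst_u and dst_v are each
//     // ((w + 1) / 2) * ((h + 1) / 2) bytes.
//     MJPGToI420(jpeg_data, jpeg_size,
//                dst_y, w,
//                dst_u, (w + 1) / 2,
//                dst_v, (w + 1) / 2,
//                w, h, w, h);
//   }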

#ifdef HAVE_JPEG
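// Destination buffer for the ARGB row callbacks below; advanced a band of
// rows at a time, mirroring I420Buffers above.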
struct ARGBBuffers {
  uint8* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPeg) to ARGB
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToARGB(const uint8* sample,
               size_t sample_size,
               uint8* argb,
               int argb_stride,
               int w,
               int h,
               int dw,
               int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret &&
      (mjpeg_decoder.GetWidth() != w || mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = {argb, argb_stride, dw, dh};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dw, dh);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dw, dh);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dw, dh);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
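// Illustrative usage of MJPGToARGB (buffer names are hypothetical; ARGB is
// 4 bytes per pixel, so a tightly packed destination uses a stride of w * 4):
//   MJPGToARGB(jpeg_data, jpeg_size, dst_argb, w * 4, w, h, w, h);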
#endif

#endif

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif