/*
 * Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "libyuv/convert_argb.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
struct I420Buffers {
  uint8* y;
  int y_stride;
  uint8* u;
  int u_stride;
  uint8* v;
  int v_stride;
  int w;
  int h;
};

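// Row callbacks invoked by MJpegDecoder::DecodeToCallback(). Each call
// converts or copies a band of |rows| scanlines into the destination planes
// held in the opaque I420Buffers context, then advances the plane pointers
// so the next band is written directly below the previous one.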
static void JpegCopyI420(void* opaque,
                         const uint8* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0],
           data[1], strides[1],
           data[2], strides[2],
           dest->y, dest->y_stride,
           dest->u, dest->u_stride,
           dest->v, dest->v_stride,
           dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI411ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I411ToI420(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0],
             dest->y, dest->y_stride,
             dest->u, dest->u_stride,
             dest->v, dest->v_stride,
             dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8* sample, size_t sample_size,
             int* width, int* height) {
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}

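// Illustrative usage sketch (not part of the library): a caller would
// typically probe the frame with MJPGSize() and then decode into its own
// I420 planes with MJPGToI420(). The buffer names and allocation scheme
// below are hypothetical; |sample|/|sample_size| come from the caller.
//
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(sample, sample_size, &width, &height) == 0) {
//     int halfwidth = (width + 1) / 2;
//     int halfheight = (height + 1) / 2;
//     uint8* y = new uint8[width * height];
//     uint8* u = new uint8[halfwidth * halfheight];
//     uint8* v = new uint8[halfwidth * halfheight];
//     MJPGToI420(sample, sample_size,
//                y, width, u, halfwidth, v, halfwidth,
//                width, height, width, height);
//     delete[] y;
//     delete[] u;
//     delete[] v;
//   }
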
// MJPG (Motion JPEG) to I420.
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToI420(const uint8* sample,
               size_t sample_size,
               uint8* y, int y_stride,
               uint8* u, int u_stride,
               uint8* v, int v_stride,
               int w, int h,
               int dw, int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != w ||
              mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = { y, y_stride, u, u_stride, v, v_stride, dw, dh };
    // YUV420
    if (mjpeg_decoder.GetColorSpace() ==
            MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dw, dh);
    // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dw, dh);
    // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dw, dh);
    // YUV411
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 4 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI411ToI420, &bufs, dw, dh);
    // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice. 411 is supported by libjpeg.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}

#ifdef HAVE_JPEG
struct ARGBBuffers {
  uint8* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI411ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I411ToARGB(data[0], strides[0],
             data[1], strides[1],
             data[2], strides[2],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0],
             dest->argb, dest->argb_stride,
             dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPEG) to ARGB.
// TODO(fbarchard): review w and h requirement. dw and dh may be enough.
LIBYUV_API
int MJPGToARGB(const uint8* sample,
               size_t sample_size,
               uint8* argb, int argb_stride,
               int w, int h,
               int dw, int dh) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != w ||
              mjpeg_decoder.GetHeight() != h)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = { argb, argb_stride, dw, dh };
    // YUV420
    if (mjpeg_decoder.GetColorSpace() ==
            MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dw, dh);
    // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dw, dh);
    // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dw, dh);
    // YUV411
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 4 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI411ToARGB, &bufs, dw, dh);
    // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dw, dh);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice. 411 is supported by libjpeg.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
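// Illustrative usage sketch (not part of the library): decode one MJPG frame
// straight into a caller-owned ARGB buffer. Names below are hypothetical and
// |sample|/|sample_size| are assumed to come from the capture layer.
//
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(sample, sample_size, &width, &height) == 0) {
//     int argb_stride = width * 4;  // 4 bytes per ARGB pixel.
//     uint8* argb = new uint8[argb_stride * height];
//     MJPGToARGB(sample, sample_size, argb, argb_stride,
//                width, height, width, height);
//     delete[] argb;
//   }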
#endif  // HAVE_JPEG

#endif  // HAVE_JPEG

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif