/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "libyuv/convert_argb.h"

#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#ifdef HAVE_JPEG
struct I420Buffers {
  uint8_t* y;
  int y_stride;
  uint8_t* u;
  int u_stride;
  uint8_t* v;
  int v_stride;
  int w;
  int h;
};

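// The Jpeg*ToI420 helpers below are row callbacks for
// MJpegDecoder::DecodeToCallback: the decoder hands each one a group of
// freshly decoded rows, the helper converts (or copies) those rows into the
// destination planes, and then advances the destination pointers so the next
// group lands directly after. The chroma pointers advance by half the row
// count (rounded up) because I420 subsamples U and V vertically by 2.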
static void JpegCopyI420(void* opaque,
                         const uint8_t* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
           dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
           dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI422ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
             dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8_t* src_mjpg,
             size_t src_size_mjpg,
             int* width,
             int* height) {
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret) {
    *width = mjpeg_decoder.GetWidth();
    *height = mjpeg_decoder.GetHeight();
  }
  mjpeg_decoder.UnloadFrame();
  return ret ? 0 : -1;  // -1 for runtime failure.
}
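
// Usage sketch (illustrative only; jpeg_data and jpeg_size are hypothetical
// caller-owned values): query the frame dimensions before allocating
// destination planes for one of the MJPGTo* converters.
//
//   int width = 0;
//   int height = 0;
//   if (MJPGSize(jpeg_data, jpeg_size, &width, &height) == 0) {
//     // width and height now hold the dimensions parsed from the JPEG
//     // headers.
//   }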

// MJPG (Motion JPEG) to I420
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToI420(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_u,
               int dst_stride_u,
               uint8_t* dst_v,
               int dst_stride_v,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    I420Buffers bufs = {dst_y, dst_stride_y, dst_u, dst_stride_u,
                        dst_v, dst_stride_v, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other
      // colorspace/subsample factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported.
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
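
// Usage sketch (illustrative only; the buffer names are hypothetical). For a
// tightly packed I420 destination the Y stride equals the width and the U/V
// strides equal the half width, rounded up:
//
//   int half_width = (width + 1) / 2;
//   int half_height = (height + 1) / 2;
//   // y_plane must hold width * height bytes; u_plane and v_plane must each
//   // hold half_width * half_height bytes.
//   int result = MJPGToI420(jpeg_data, jpeg_size,
//                           y_plane, width,
//                           u_plane, half_width,
//                           v_plane, half_width,
//                           width, height,   // source dimensions
//                           width, height);  // destination dimensions
//   // result is 0 on success, nonzero on failure.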

struct NV21Buffers {
  uint8_t* y;
  int y_stride;
  uint8_t* vu;
  int vu_stride;
  int w;
  int h;
};

static void JpegI420ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I420ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI422ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I422ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI444ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I444ToNV21(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->vu, dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

static void JpegI400ToNV21(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  NV21Buffers* dest = (NV21Buffers*)(opaque);
  I400ToNV21(data[0], strides[0], dest->y, dest->y_stride, dest->vu,
             dest->vu_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->vu += ((rows + 1) >> 1) * dest->vu_stride;
  dest->h -= rows;
}

// MJPG (Motion JPEG) to NV21
LIBYUV_API
int MJPGToNV21(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_vu,
               int dst_stride_vu,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    NV21Buffers bufs = {dst_y, dst_stride_y, dst_vu,
                        dst_stride_vu, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToNV21, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToNV21, &bufs, dst_width,
                                           dst_height);
    } else {
      // Unknown colorspace.
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
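
// Usage sketch (illustrative only; the buffer names are hypothetical). NV21
// pairs a full-resolution Y plane with a single half-height plane of
// interleaved V/U bytes, so the VU stride covers a full row of V/U pairs:
//
//   int half_width = (width + 1) / 2;
//   int half_height = (height + 1) / 2;
//   // y_plane must hold width * height bytes; vu_plane must hold
//   // half_width * 2 * half_height bytes.
//   int result = MJPGToNV21(jpeg_data, jpeg_size,
//                           y_plane, width,
//                           vu_plane, half_width * 2,
//                           width, height,   // source dimensions
//                           width, height);  // destination dimensions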

struct ARGBBuffers {
  uint8_t* argb;
  int argb_stride;
  int w;
  int h;
};

static void JpegI420ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPEG) to ARGB
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToARGB(const uint8_t* src_mjpg,
               size_t src_size_mjpg,
               uint8_t* dst_argb,
               int dst_stride_argb,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (src_size_mjpg == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(src_mjpg, src_size_mjpg);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = {dst_argb, dst_stride_argb, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other
      // colorspace/subsample factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported.
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}
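
// Usage sketch (illustrative only; the buffer names are hypothetical). ARGB
// output is 4 bytes per pixel, so a tightly packed destination uses a stride
// of width * 4:
//
//   // argb_plane must hold width * height * 4 bytes.
//   int result = MJPGToARGB(jpeg_data, jpeg_size, argb_plane, width * 4,
//                           width, height,   // source dimensions
//                           width, height);  // destination dimensions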

#endif  // HAVE_JPEG

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif