/*
 * GXF muxer.
 * Copyright (c) 2006 SmartJog S.A., Baptiste Coudurier <baptiste dot coudurier at smartjog dot com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "libavutil/intfloat.h"
#include "libavutil/mathematics.h"
#include "avformat.h"
#include "avio_internal.h"
#include "internal.h"
#include "gxf.h"
#include "mux.h"

#define GXF_SAMPLES_PER_FRAME 32768
#define GXF_AUDIO_PACKET_SIZE 65536

#define GXF_TIMECODE(c, d, h, m, s, f) \
    ((c) << 30 | (d) << 29 | (h) << 24 | (m) << 16 | (s) << 8 | (f))

typedef struct GXFTimecode{
    int hh;
    int mm;
    int ss;
    int ff;
    int color;
    int drop;
} GXFTimecode;

typedef struct GXFStreamContext {
    int64_t pkt_cnt;
    uint32_t track_type;
    uint32_t sample_size;
    uint32_t sample_rate;
    uint16_t media_type;
    uint16_t media_info;
    int frame_rate_index;
    int lines_index;
    int fields;
    int iframes;
    int pframes;
    int bframes;
    int p_per_gop;
    int b_per_i_or_p; ///< number of B-frames per I-frame or P-frame
    int first_gop_closed;
    unsigned order;   ///< interleaving order
} GXFStreamContext;

typedef struct GXFContext {
    AVClass *av_class;
    uint32_t nb_fields;
    uint16_t audio_tracks;
    uint16_t mpeg_tracks;
    int64_t creation_time;
    uint32_t umf_start_offset;
    uint32_t umf_track_offset;
    uint32_t umf_media_offset;
    uint32_t umf_length;
    uint16_t umf_track_size;
    uint16_t umf_media_size;
    AVRational time_base;
    int flags;
    GXFStreamContext timecode_track;
    unsigned *flt_entries;    ///< offsets of packets /1024, starts after 2nd video field
    unsigned flt_entries_nb;
    uint64_t *map_offsets;    ///< offset of map packets
    unsigned map_offsets_nb;
    unsigned packet_count;
    GXFTimecode tc;
} GXFContext;

static const struct {
    int height, index;
} gxf_lines_tab[] = {
    { 480,  1 }, /* NTSC */
    { 512,  1 }, /* NTSC + VBI */
    { 576,  2 }, /* PAL */
    { 608,  2 }, /* PAL + VBI */
    { 1080, 4 },
    { 720,  6 },
};

static const AVCodecTag gxf_media_types[] = {
    { AV_CODEC_ID_MJPEG     ,  3 }, /* NTSC */
    { AV_CODEC_ID_MJPEG     ,  4 }, /* PAL */
    { AV_CODEC_ID_PCM_S24LE ,  9 },
    { AV_CODEC_ID_PCM_S16LE , 10 },
    { AV_CODEC_ID_MPEG2VIDEO, 11 }, /* NTSC */
    { AV_CODEC_ID_MPEG2VIDEO, 12 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   , 13 }, /* NTSC */
    { AV_CODEC_ID_DVVIDEO   , 14 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   , 15 }, /* 50M NTSC */
    { AV_CODEC_ID_DVVIDEO   , 16 }, /* 50M PAL */
    { AV_CODEC_ID_AC3       , 17 },
    //{ AV_CODEC_ID_NONE,   , 18 }, /* Non compressed 24 bit audio */
    { AV_CODEC_ID_MPEG2VIDEO, 20 }, /* MPEG HD */
    { AV_CODEC_ID_MPEG1VIDEO, 22 }, /* NTSC */
    { AV_CODEC_ID_MPEG1VIDEO, 23 }, /* PAL */
    { AV_CODEC_ID_NONE,        0 },
};

#define SERVER_PATH "EXT:/PDR/default/"
#define ES_NAME_PATTERN "EXT:/PDR/default/ES."

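/* Map the video frame height to the GXF "lines per frame" index code. */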
static int gxf_find_lines_index(AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;
    int i;

    for (i = 0; i < 6; ++i) {
        if (st->codecpar->height == gxf_lines_tab[i].height) {
            sc->lines_index = gxf_lines_tab[i].index;
            return 0;
        }
    }
    return -1;
}

static void gxf_write_padding(AVIOContext *pb, int64_t to_pad)
{
    ffio_fill(pb, 0, to_pad);
}

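/*
 * Pad the packet starting at pos to a 32-bit boundary, then patch the
 * 32-bit size field at pos + 6 in the packet header. Returns the final
 * packet size.
 */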
static int64_t updatePacketSize(AVIOContext *pb, int64_t pos)
{
    int64_t curpos;
    int size;

    size = avio_tell(pb) - pos;
    if (size % 4) {
        gxf_write_padding(pb, 4 - size % 4);
        size = avio_tell(pb) - pos;
    }
    curpos = avio_tell(pb);
    avio_seek(pb, pos + 6, SEEK_SET);
    avio_wb32(pb, size);
    avio_seek(pb, curpos, SEEK_SET);
    return curpos - pos;
}

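/*
 * Patch the 16-bit section size stored at pos (the value excludes the size
 * field itself) and return the full section length.
 */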
static int64_t updateSize(AVIOContext *pb, int64_t pos)
{
    int64_t curpos;

    curpos = avio_tell(pb);
    avio_seek(pb, pos, SEEK_SET);
    avio_wb16(pb, curpos - pos - 2);
    avio_seek(pb, curpos, SEEK_SET);
    return curpos - pos;
}

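/*
 * Write the 16-byte GXF packet header: leader, packet type, a placeholder
 * size (patched later by updatePacketSize()) and the 0xE1/0xE2 trailer.
 */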
static void gxf_write_packet_header(AVIOContext *pb, GXFPktType type)
{
    avio_wb32(pb, 0);  /* packet leader for synchro */
    avio_w8(pb, 1);
    avio_w8(pb, type); /* packet type */
    avio_wb32(pb, 0);  /* size */
    avio_wb32(pb, 0);  /* reserved */
    avio_w8(pb, 0xE1); /* trailer 1 */
    avio_w8(pb, 0xE2); /* trailer 2 */
}

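/*
 * Write the MPEG track auxiliary data tag: a text description of the GOP
 * structure (P per GOP, B per I/P, chroma format, closed-GOP flag, ...)
 * derived from the frame counters gathered while writing media packets.
 */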
static int gxf_write_mpeg_auxiliary(AVIOContext *pb, AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;
    char buffer[1024];
    int size, starting_line;

    if (sc->iframes) {
        sc->p_per_gop = sc->pframes / sc->iframes;
        if (sc->pframes % sc->iframes)
            sc->p_per_gop++;
        if (sc->pframes) {
            sc->b_per_i_or_p = sc->bframes / sc->pframes;
            if (sc->bframes % sc->pframes)
                sc->b_per_i_or_p++;
        }
        if (sc->p_per_gop > 9)
            sc->p_per_gop = 9;    /* ensure value won't take more than one char */
        if (sc->b_per_i_or_p > 9)
            sc->b_per_i_or_p = 9; /* ensure value won't take more than one char */
    }
    if (st->codecpar->height == 512 || st->codecpar->height == 608)
        starting_line = 7; // VBI
    else if (st->codecpar->height == 480)
        starting_line = 20;
    else
        starting_line = 23; // default PAL

    size = snprintf(buffer, sizeof(buffer), "Ver 1\nBr %.6f\nIpg 1\nPpi %d\nBpiop %d\n"
                    "Pix 0\nCf %d\nCg %d\nSl %d\nnl16 %d\nVi 1\nf1 1\n",
                    (float)st->codecpar->bit_rate, sc->p_per_gop, sc->b_per_i_or_p,
                    st->codecpar->format == AV_PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
                    starting_line, (st->codecpar->height + 15) / 16);
    av_assert0(size < sizeof(buffer));
    avio_w8(pb, TRACK_MPG_AUX);
    avio_w8(pb, size + 1);
    avio_write(pb, (uint8_t *)buffer, size + 1);
    return size + 3;
}

static int gxf_write_dv_auxiliary(AVIOContext *pb, AVStream *st)
{
    int64_t track_aux_data = 0;

    avio_w8(pb, TRACK_AUX);
    avio_w8(pb, 8);
    if (st->codecpar->format == AV_PIX_FMT_YUV420P)
        track_aux_data |= 0x01;       /* marks stream as DVCAM instead of DVPRO */
    track_aux_data |= 0x40000000;     /* aux data is valid */
    avio_wl64(pb, track_aux_data);
    return 8;
}

static int gxf_write_timecode_auxiliary(AVIOContext *pb, GXFContext *gxf)
{
    uint32_t timecode = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                                     gxf->tc.hh, gxf->tc.mm,
                                     gxf->tc.ss, gxf->tc.ff);

    avio_w8(pb, TRACK_AUX);
    avio_w8(pb, 8);
    avio_wl32(pb, timecode);
    /* reserved */
    avio_wl32(pb, 0);
    return 8;
}

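/*
 * Write one track description for the MAP packet: media file name,
 * per-codec auxiliary data, file system version, frame rate, lines per
 * frame and fields per frame.
 */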
static int gxf_write_track_description(AVFormatContext *s, GXFStreamContext *sc, int index)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;

    /* track description section */
    avio_w8(pb, sc->media_type + 0x80);
    avio_w8(pb, index + 0xC0);

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */

    /* media file name */
    avio_w8(pb, TRACK_NAME);
    avio_w8(pb, strlen(ES_NAME_PATTERN) + 3);
    avio_write(pb, ES_NAME_PATTERN, sizeof(ES_NAME_PATTERN) - 1);
    avio_wb16(pb, sc->media_info);
    avio_w8(pb, 0);

    switch (sc->track_type) {
    case 3:  /* timecode */
        gxf_write_timecode_auxiliary(pb, gxf);
        break;
    case 4:  /* MPEG2 */
    case 9:  /* MPEG1 */
        gxf_write_mpeg_auxiliary(pb, s->streams[index]);
        break;
    case 5:  /* DV25 */
    case 6:  /* DV50 */
        gxf_write_dv_auxiliary(pb, s->streams[index]);
        break;
    default:
        avio_w8(pb, TRACK_AUX);
        avio_w8(pb, 8);
        avio_wl64(pb, 0);
    }

    /* file system version */
    avio_w8(pb, TRACK_VER);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* frame rate */
    avio_w8(pb, TRACK_FPS);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->frame_rate_index);

    /* lines per frame */
    avio_w8(pb, TRACK_LINES);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->lines_index);

    /* fields per frame */
    avio_w8(pb, TRACK_FPF);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->fields);

    return updateSize(pb, pos);
}

static int gxf_write_material_data_section(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int len;
    const char *filename = strrchr(s->url, '/');

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */

    /* name */
    if (filename)
        filename++;
    else
        filename = s->url;
    len = strlen(filename);

    avio_w8(pb, MAT_NAME);
    avio_w8(pb, strlen(SERVER_PATH) + len + 1);
    avio_write(pb, SERVER_PATH, sizeof(SERVER_PATH) - 1);
    avio_write(pb, filename, len);
    avio_w8(pb, 0);

    /* first field */
    avio_w8(pb, MAT_FIRST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* last field */
    avio_w8(pb, MAT_LAST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* reserved */
    avio_w8(pb, MAT_MARK_IN);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    avio_w8(pb, MAT_MARK_OUT);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* estimated size */
    avio_w8(pb, MAT_SIZE);
    avio_w8(pb, 4);
    avio_wb32(pb, avio_size(pb) / 1024);

    return updateSize(pb, pos);
}

static int gxf_write_track_description_section(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int i;

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size */
    for (i = 0; i < s->nb_streams; ++i)
        gxf_write_track_description(s, s->streams[i]->priv_data, i);

    gxf_write_track_description(s, &gxf->timecode_track, s->nb_streams);

    return updateSize(pb, pos);
}

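/*
 * Write a MAP packet (material and track description). Unless this is a
 * rewrite of an existing MAP packet, remember its offset so the trailer
 * can update every MAP packet with the final duration.
 */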
static int gxf_write_map_packet(AVFormatContext *s, int rewrite)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    if (!rewrite) {
        if (!(gxf->map_offsets_nb % 30)) {
            int err;
            if ((err = av_reallocp_array(&gxf->map_offsets,
                                         gxf->map_offsets_nb + 30,
                                         sizeof(*gxf->map_offsets))) < 0) {
                gxf->map_offsets_nb = 0;
                av_log(s, AV_LOG_ERROR, "could not realloc map offsets\n");
                return err;
            }
        }
        gxf->map_offsets[gxf->map_offsets_nb++] = pos; // do not increment here
    }

    gxf_write_packet_header(pb, PKT_MAP);

    /* preamble */
    avio_w8(pb, 0xE0); /* version */
    avio_w8(pb, 0xFF); /* reserved */

    gxf_write_material_data_section(s);
    gxf_write_track_description_section(s);

    return updatePacketSize(pb, pos);
}

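/*
 * Write the field locator table (FLT) packet: up to 1000 entries giving
 * the 1 KiB-granular file offset of the media packet that starts each
 * group of fields_per_flt fields.
 */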
static int gxf_write_flt_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);
    int fields_per_flt = (gxf->nb_fields+1) / 1000 + 1;
    int flt_entries = gxf->nb_fields / fields_per_flt;
    int i = 0;

    gxf_write_packet_header(pb, PKT_FLT);

    avio_wl32(pb, fields_per_flt); /* number of fields */
    avio_wl32(pb, flt_entries);    /* number of active flt entries */

    if (gxf->flt_entries) {
        for (i = 0; i < flt_entries; i++)
            avio_wl32(pb, gxf->flt_entries[(i*fields_per_flt)>>1]);
    }

    ffio_fill(pb, 0, (1000 - i) * 4);

    return updatePacketSize(pb, pos);
}

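/*
 * Write the UMF material description: global flags, track length in
 * fields, mark in/out, the timecodes at mark in and mark out, and the
 * creation/modification timestamps taken from the metadata.
 */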
static int gxf_write_umf_material_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int timecode_base = gxf->time_base.den == 60000 ? 60 : 50;
    int64_t timestamp = 0;
    uint64_t nb_fields;
    uint32_t timecode_in;  // timecode at mark in
    uint32_t timecode_out; // timecode at mark out

    ff_parse_creation_time_metadata(s, &timestamp, 1);

    timecode_in = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                               gxf->tc.hh, gxf->tc.mm,
                               gxf->tc.ss, gxf->tc.ff);

    nb_fields = gxf->nb_fields +
                gxf->tc.hh * (timecode_base * 3600) +
                gxf->tc.mm * (timecode_base * 60)   +
                gxf->tc.ss * timecode_base          +
                gxf->tc.ff;

    timecode_out = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
                                nb_fields / (timecode_base * 3600) % 24,
                                nb_fields / (timecode_base * 60)   % 60,
                                nb_fields /  timecode_base         % 60,
                                nb_fields %  timecode_base);

    avio_wl32(pb, gxf->flags);
    avio_wl32(pb, gxf->nb_fields); /* length of the longest track */
    avio_wl32(pb, gxf->nb_fields); /* length of the shortest track */
    avio_wl32(pb, 0);              /* mark in */
    avio_wl32(pb, gxf->nb_fields); /* mark out */
    avio_wl32(pb, timecode_in);    /* timecode mark in */
    avio_wl32(pb, timecode_out);   /* timecode mark out */
    avio_wl64(pb, timestamp);      /* modification time */
    avio_wl64(pb, timestamp);      /* creation time */
    avio_wl16(pb, 0);              /* reserved */
    avio_wl16(pb, 0);              /* reserved */
    avio_wl16(pb, gxf->audio_tracks);
    avio_wl16(pb, 1);              /* timecode track count */
    avio_wl16(pb, 0);              /* reserved */
    avio_wl16(pb, gxf->mpeg_tracks);
    return 48;
}

static int gxf_write_umf_payload(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;

    avio_wl32(pb, gxf->umf_length); /* total length of the umf data */
    avio_wl32(pb, 3);               /* version */
    avio_wl32(pb, s->nb_streams+1);
    avio_wl32(pb, gxf->umf_track_offset); /* umf track section offset */
    avio_wl32(pb, gxf->umf_track_size);
    avio_wl32(pb, s->nb_streams+1);
    avio_wl32(pb, gxf->umf_media_offset);
    avio_wl32(pb, gxf->umf_media_size);
    avio_wl32(pb, gxf->umf_length); /* user data offset */
    avio_wl32(pb, 0); /* user data size */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 48;
}

static int gxf_write_umf_track_description(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    GXFContext *gxf = s->priv_data;
    int64_t pos = avio_tell(pb);
    int i;

    gxf->umf_track_offset = pos - gxf->umf_start_offset;
    for (i = 0; i < s->nb_streams; ++i) {
        GXFStreamContext *sc = s->streams[i]->priv_data;
        avio_wl16(pb, sc->media_info);
        avio_wl16(pb, 1);
    }

    avio_wl16(pb, gxf->timecode_track.media_info);
    avio_wl16(pb, 1);

    return avio_tell(pb) - pos;
}

static int gxf_write_umf_media_mpeg(AVIOContext *pb, AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;

    if (st->codecpar->format == AV_PIX_FMT_YUV422P)
        avio_wl32(pb, 2);
    else
        avio_wl32(pb, 1); /* default to 420 */
    avio_wl32(pb, sc->first_gop_closed == 1); /* closed = 1, open = 0, unknown = 255 */
    avio_wl32(pb, 3);   /* top = 1, bottom = 2, frame = 3, unknown = 0 */
    avio_wl32(pb, 1);   /* I picture per GOP */
    avio_wl32(pb, sc->p_per_gop);
    avio_wl32(pb, sc->b_per_i_or_p);
    if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        avio_wl32(pb, 2);
    else if (st->codecpar->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        avio_wl32(pb, 1);
    else
        avio_wl32(pb, 0);
    avio_wl32(pb, 0); /* reserved */
    return 32;
}

static int gxf_write_umf_media_timecode(AVIOContext *pb, int drop)
{
    avio_wl32(pb, drop); /* drop frame */
    ffio_fill(pb, 0, 7 * 4); /* reserved */
    return 32;
}

static int gxf_write_umf_media_dv(AVIOContext *pb, GXFStreamContext *sc, AVStream *st)
{
    int dv_umf_data = 0;

    if (st->codecpar->format == AV_PIX_FMT_YUV420P)
        dv_umf_data |= 0x20; /* marks as DVCAM instead of DVPRO */
    avio_wl32(pb, dv_umf_data);
    ffio_fill(pb, 0, 7 * 4);
    return 32;
}

static int gxf_write_umf_media_audio(AVIOContext *pb, GXFStreamContext *sc)
{
    avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
    avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
    avio_wl32(pb, 0); /* number of fields over which to ramp up sound level */
    avio_wl32(pb, 0); /* number of fields over which to ramp down sound level */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 32;
}

static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;

    pos = avio_tell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    for (unsigned i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = avio_tell(pb);
        avio_wl16(pb, 0); /* length */
        avio_wl16(pb, sc->media_info);
        avio_wl16(pb, 0); /* reserved */
        avio_wl16(pb, 0); /* reserved */
        avio_wl32(pb, gxf->nb_fields);
        avio_wl32(pb, 0); /* attributes rw, ro */
        avio_wl32(pb, 0); /* mark in */
        avio_wl32(pb, gxf->nb_fields); /* mark out */
        avio_write(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        avio_wb16(pb, sc->media_info);
        ffio_fill(pb, 0, 88 - (strlen(ES_NAME_PATTERN) + 2));
        avio_wl32(pb, sc->track_type);
        avio_wl32(pb, sc->sample_rate);
        avio_wl32(pb, sc->sample_size);
        avio_wl32(pb, 0); /* reserved */

        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, gxf->tc.drop);
        else {
            AVStream *st = s->streams[i];
            switch (st->codecpar->codec_id) {
            case AV_CODEC_ID_MPEG1VIDEO:
            case AV_CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case AV_CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case AV_CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc, st);
                break;
            }
        }

        curpos = avio_tell(pb);
        avio_seek(pb, startpos, SEEK_SET);
        avio_wl16(pb, curpos - startpos);
        avio_seek(pb, curpos, SEEK_SET);
    }
    return avio_tell(pb) - pos;
}

static int gxf_write_umf_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    gxf_write_packet_header(pb, PKT_UMF);

    /* preamble */
    avio_w8(pb, 3); /* first and last (only) packet */
    avio_wb32(pb, gxf->umf_length); /* data length */

    gxf->umf_start_offset = avio_tell(pb);
    gxf_write_umf_payload(s);
    gxf_write_umf_material_description(s);
    gxf->umf_track_size = gxf_write_umf_track_description(s);
    gxf->umf_media_size = gxf_write_umf_media_description(s);
    gxf->umf_length = avio_tell(pb) - gxf->umf_start_offset;
    return updatePacketSize(pb, pos);
}

static void gxf_init_timecode_track(GXFStreamContext *sc, GXFStreamContext *vsc)
{
    if (!vsc)
        return;

    sc->media_type = vsc->sample_rate == 60 ? 7 : 8;
    sc->sample_rate = vsc->sample_rate;
    sc->media_info = ('T'<<8) | '0';
    sc->track_type = 3;
    sc->frame_rate_index = vsc->frame_rate_index;
    sc->lines_index = vsc->lines_index;
    sc->sample_size = 16;
    sc->fields = vsc->fields;
}

static int gxf_init_timecode(AVFormatContext *s, GXFTimecode *tc, const char *tcstr, int fields)
{
    char c;

    if (sscanf(tcstr, "%d:%d:%d%c%d", &tc->hh, &tc->mm, &tc->ss, &c, &tc->ff) != 5) {
        av_log(s, AV_LOG_ERROR, "unable to parse timecode, "
                                "syntax: hh:mm:ss[:;.]ff\n");
        return -1;
    }

    tc->color = 0;
    tc->drop = c != ':';

    if (fields == 2)
        tc->ff = tc->ff * 2;

    return 0;
}

static int gxf_write_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    GXFContext *gxf = s->priv_data;
    GXFStreamContext *vsc = NULL;
    uint8_t tracks[255] = {0};
    int i, media_info = 0;
    int ret;
    AVDictionaryEntry *tcr = av_dict_get(s->metadata, "timecode", NULL, 0);

    if (!(pb->seekable & AVIO_SEEKABLE_NORMAL)) {
        av_log(s, AV_LOG_ERROR, "gxf muxer does not support streamed output, patch welcome\n");
        return -1;
    }

    gxf->flags |= 0x00080000; /* material is simple clip */
    for (i = 0; i < s->nb_streams; ++i) {
        AVStream *st = s->streams[i];
        GXFStreamContext *sc = av_mallocz(sizeof(*sc));
        if (!sc)
            return AVERROR(ENOMEM);
        st->priv_data = sc;

        sc->media_type = ff_codec_get_tag(gxf_media_types, st->codecpar->codec_id);
        if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (st->codecpar->codec_id != AV_CODEC_ID_PCM_S16LE) {
                av_log(s, AV_LOG_ERROR, "only 16-bit little-endian PCM is allowed for now\n");
                return -1;
            }
            if (st->codecpar->sample_rate != 48000) {
                av_log(s, AV_LOG_ERROR, "only a 48000 Hz sampling rate is allowed\n");
                return -1;
            }
            if (st->codecpar->ch_layout.nb_channels != 1) {
                av_log(s, AV_LOG_ERROR, "only mono tracks are allowed\n");
                return -1;
            }
            ret = ff_stream_add_bitstream_filter(st, "pcm_rechunk", "n="AV_STRINGIFY(GXF_SAMPLES_PER_FRAME));
            if (ret < 0)
                return ret;
            sc->track_type = 2;
            sc->sample_rate = st->codecpar->sample_rate;
            avpriv_set_pts_info(st, 64, 1, sc->sample_rate);
            sc->sample_size = 16;
            sc->frame_rate_index = -2;
            sc->lines_index = -2;
            sc->fields = -2;
            gxf->audio_tracks++;
            gxf->flags |= 0x04000000; /* audio is 16 bit pcm */
            media_info = 'A';
        } else if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            if (i != 0) {
                av_log(s, AV_LOG_ERROR, "video stream must be the first track\n");
                return -1;
            }
            /* FIXME check from time_base ? */
            if (st->codecpar->height == 480 || st->codecpar->height == 512) { /* NTSC or NTSC+VBI */
                sc->frame_rate_index = 5;
                sc->sample_rate = 60;
                gxf->flags |= 0x00000080;
                gxf->time_base = (AVRational){ 1001, 60000 };
            } else if (st->codecpar->height == 576 || st->codecpar->height == 608) { /* PAL or PAL+VBI */
                sc->frame_rate_index = 6;
                sc->media_type++;
                sc->sample_rate = 50;
                gxf->flags |= 0x00000040;
                gxf->time_base = (AVRational){ 1, 50 };
            } else {
                av_log(s, AV_LOG_ERROR, "unsupported video resolution, "
                       "gxf muxer only accepts PAL or NTSC resolutions currently\n");
                return -1;
            }
            if (!tcr)
                tcr = av_dict_get(st->metadata, "timecode", NULL, 0);
            avpriv_set_pts_info(st, 64, gxf->time_base.num, gxf->time_base.den);
            if (gxf_find_lines_index(st) < 0)
                sc->lines_index = -1;
            sc->sample_size = st->codecpar->bit_rate;
            sc->fields = 2; /* interlaced */

            vsc = sc;

            switch (st->codecpar->codec_id) {
            case AV_CODEC_ID_MJPEG:
                sc->track_type = 1;
                gxf->flags |= 0x00004000;
                media_info = 'J';
                break;
            case AV_CODEC_ID_MPEG1VIDEO:
                sc->track_type = 9;
                gxf->mpeg_tracks++;
                media_info = 'L';
                break;
            case AV_CODEC_ID_MPEG2VIDEO:
                sc->first_gop_closed = -1;
                sc->track_type = 4;
                gxf->mpeg_tracks++;
                gxf->flags |= 0x00008000;
                media_info = 'M';
                break;
            case AV_CODEC_ID_DVVIDEO:
                if (st->codecpar->format == AV_PIX_FMT_YUV422P) {
                    sc->media_type += 2;
                    sc->track_type = 6;
                    gxf->flags |= 0x00002000;
                    media_info = 'E';
                } else {
                    sc->track_type = 5;
                    gxf->flags |= 0x00001000;
                    media_info = 'D';
                }
                break;
            default:
                av_log(s, AV_LOG_ERROR, "video codec not supported\n");
                return -1;
            }
        }
        /* FIXME first 10 audio tracks are 0 to 9 next 22 are A to V */
        sc->media_info = media_info<<8 | ('0'+tracks[media_info]++);
        sc->order = s->nb_streams - st->index;
    }

    if (tcr && vsc)
        gxf_init_timecode(s, &gxf->tc, tcr->value, vsc->fields);

    gxf_init_timecode_track(&gxf->timecode_track, vsc);
    gxf->flags |= 0x200000; // time code track is non-drop frame

    if ((ret = gxf_write_map_packet(s, 0)) < 0)
        return ret;
    gxf_write_flt_packet(s);
    gxf_write_umf_packet(s);

    gxf->packet_count = 3;

    return 0;
}

static int gxf_write_eos_packet(AVIOContext *pb)
{
    int64_t pos = avio_tell(pb);

    gxf_write_packet_header(pb, PKT_EOS);
    return updatePacketSize(pb, pos);
}

static int gxf_write_trailer(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t end;
    int i;
    int ret;

    gxf_write_eos_packet(pb);
    end = avio_tell(pb);
    avio_seek(pb, 0, SEEK_SET);
    /* overwrite map, flt and umf packets with new values */
    if ((ret = gxf_write_map_packet(s, 1)) < 0)
        return ret;
    gxf_write_flt_packet(s);
    gxf_write_umf_packet(s);
    /* update duration in all map packets */
    for (i = 1; i < gxf->map_offsets_nb; i++) {
        avio_seek(pb, gxf->map_offsets[i], SEEK_SET);
        if ((ret = gxf_write_map_packet(s, 1)) < 0)
            return ret;
    }

    avio_seek(pb, end, SEEK_SET);

    return 0;
}

static void gxf_deinit(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;

    av_freep(&gxf->flt_entries);
    av_freep(&gxf->map_offsets);
}

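/*
 * Scan an MPEG frame for its first picture start code: record whether the
 * first GOP is closed and return the picture coding type of the frame.
 */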
static int gxf_parse_mpeg_frame(GXFStreamContext *sc, const uint8_t *buf, int size)
{
    uint32_t c=-1;
    int i;
    for(i=0; i<size-4 && c!=0x100; i++){
        c = (c<<8) + buf[i];
        if(c == 0x1B8 && sc->first_gop_closed == -1) /* GOP start code */
            sc->first_gop_closed= (buf[i+4]>>6)&1;
    }
    return (buf[i+1]>>3)&7;
}

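/*
 * Write the 16-byte media packet preamble: media type, track index, field
 * number, a codec-dependent size/frame-type field and flags.
 */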
static int gxf_write_media_preamble(AVFormatContext *s, AVPacket *pkt, int size)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st = s->streams[pkt->stream_index];
    GXFStreamContext *sc = st->priv_data;
    unsigned field_nb;
    /* If the video is frame-encoded, the frame numbers shall be represented by
     * even field numbers.
     * see SMPTE360M-2004  6.4.2.1.3 Media field number */
    if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        field_nb = gxf->nb_fields;
    } else {
        field_nb = av_rescale_rnd(pkt->dts, gxf->time_base.den,
                                  (int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
    }

    avio_w8(pb, sc->media_type);
    avio_w8(pb, st->index);
    avio_wb32(pb, field_nb);
    if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
        avio_wb16(pb, 0);
        avio_wb16(pb, size / 2);
    } else if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
        if (frame_type == AV_PICTURE_TYPE_I) {
            avio_w8(pb, 0x0d);
            sc->iframes++;
        } else if (frame_type == AV_PICTURE_TYPE_B) {
            avio_w8(pb, 0x0f);
            sc->bframes++;
        } else {
            avio_w8(pb, 0x0e);
            sc->pframes++;
        }
        avio_wb24(pb, size);
    } else if (st->codecpar->codec_id == AV_CODEC_ID_DVVIDEO) {
        avio_w8(pb, size / 4096);
        avio_wb24(pb, 0);
    } else
        avio_wb32(pb, size);
    avio_wb32(pb, field_nb);
    avio_w8(pb, 1); /* flags */
    avio_w8(pb, 0); /* reserved */
    return 16;
}

static int gxf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st = s->streams[pkt->stream_index];
    int64_t pos = avio_tell(pb);
    int padding = 0;
    unsigned packet_start_offset = avio_tell(pb) / 1024;
    int ret;

    gxf_write_packet_header(pb, PKT_MEDIA);
    if (st->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO && pkt->size % 4) /* MPEG-2 frames must be padded */
        padding = 4 - pkt->size % 4;
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        padding = GXF_AUDIO_PACKET_SIZE - pkt->size;
    gxf_write_media_preamble(s, pkt, pkt->size + padding);
    avio_write(pb, pkt->data, pkt->size);
    gxf_write_padding(pb, padding);

    if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        if (!(gxf->flt_entries_nb % 500)) {
            int err;
            if ((err = av_reallocp_array(&gxf->flt_entries,
                                         gxf->flt_entries_nb + 500,
                                         sizeof(*gxf->flt_entries))) < 0) {
                gxf->flt_entries_nb = 0;
                gxf->nb_fields = 0;
                av_log(s, AV_LOG_ERROR, "could not reallocate flt entries\n");
                return err;
            }
        }
        gxf->flt_entries[gxf->flt_entries_nb++] = packet_start_offset;
        gxf->nb_fields += 2; // count fields
    }

    updatePacketSize(pb, pos);

    gxf->packet_count++;
    if (gxf->packet_count == 100) {
        if ((ret = gxf_write_map_packet(s, 0)) < 0)
            return ret;
        gxf->packet_count = 0;
    }

    return 0;
}

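/*
 * Interleaving comparison: order packets by media field number; audio is
 * compared on an even field number so it is muxed before the video of the
 * same field, and ties are broken by stream order.
 */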
static int gxf_compare_field_nb(AVFormatContext *s, const AVPacket *next,
                                const AVPacket *cur)
{
    GXFContext *gxf = s->priv_data;
    const AVPacket *pkt[2] = { cur, next };
    int i, field_nb[2];
    GXFStreamContext *sc[2];

    for (i = 0; i < 2; i++) {
        AVStream *st = s->streams[pkt[i]->stream_index];
        sc[i] = st->priv_data;
        if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            field_nb[i] = av_rescale_rnd(pkt[i]->dts, gxf->time_base.den,
                                         (int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
            field_nb[i] &= ~1; // compare against even field number because audio must be before video
        } else
            field_nb[i] = pkt[i]->dts; // dts are field based
    }

    return field_nb[1] > field_nb[0] ||
           (field_nb[1] == field_nb[0] && sc[1]->order > sc[0]->order);
}

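/*
 * Rewrite pts/dts in field (video) or sample (audio) units before handing
 * the packet to the generic per-dts interleaver with the GXF comparison
 * function above.
 */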
static int gxf_interleave_packet(AVFormatContext *s, AVPacket *pkt,
                                 int flush, int has_packet)
{
    int ret;
    if (has_packet) {
        AVStream *st = s->streams[pkt->stream_index];
        GXFStreamContext *sc = st->priv_data;
        if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
            pkt->pts = pkt->dts = sc->pkt_cnt * 2; // enforce 2 fields
        else
            pkt->pts = pkt->dts = sc->pkt_cnt * GXF_SAMPLES_PER_FRAME;
        sc->pkt_cnt++;
        if ((ret = ff_interleave_add_packet(s, pkt, gxf_compare_field_nb)) < 0)
            return ret;
    }
    return ff_interleave_packet_per_dts(s, pkt, flush, 0);
}

const AVOutputFormat ff_gxf_muxer = {
    .name              = "gxf",
    .long_name         = NULL_IF_CONFIG_SMALL("GXF (General eXchange Format)"),
    .extensions        = "gxf",
    .priv_data_size    = sizeof(GXFContext),
    .audio_codec       = AV_CODEC_ID_PCM_S16LE,
    .video_codec       = AV_CODEC_ID_MPEG2VIDEO,
    .write_header      = gxf_write_header,
    .write_packet      = gxf_write_packet,
    .write_trailer     = gxf_write_trailer,
    .deinit            = gxf_deinit,
    .interleave_packet = gxf_interleave_packet,
};