1 /*
2 * Dynamic Adaptive Streaming over HTTP demux
3 * Copyright (c) 2017 samsamsam@o2.pl based on HLS demux
4 * Copyright (c) 2017 Steven Liu
5 *
6 * This file is part of FFmpeg.
7 *
8 * FFmpeg is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2.1 of the License, or (at your option) any later version.
12 *
13 * FFmpeg is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
17 *
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with FFmpeg; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 */
22 #include <libxml/parser.h>
23 #include "libavutil/bprint.h"
24 #include "libavutil/opt.h"
25 #include "libavutil/time.h"
26 #include "libavutil/parseutils.h"
27 #include "internal.h"
28 #include "avio_internal.h"
29 #include "dash.h"
30 #include "demux.h"
31
32 #define INITIAL_BUFFER_SIZE 32768
33
34 struct fragment {
35 int64_t url_offset;
36 int64_t size;
37 char *url;
38 };
39
40 /*
41 * reference to : ISO_IEC_23009-1-DASH-2012
42 * Section: 5.3.9.6.2
43 * Table: Table 17 — Semantics of SegmentTimeline element
44 * */
45 struct timeline {
46 /* starttime: Element or Attribute Name
47 * specifies the MPD start time, in @timescale units, at which
48 * the first Segment in the series starts, relative to the beginning of the Period.
49 * The value of this attribute must be equal to or greater than the sum of the previous S
50 * element earliest presentation time and the sum of the contiguous Segment durations.
51 * If the value of the attribute is greater than what is expressed by the previous S element,
52 * it expresses discontinuities in the timeline.
53 * If not present then the value shall be assumed to be zero for the first S element
54 * and for the subsequent S elements, the value shall be assumed to be the sum of
55 * the previous S element's earliest presentation time and contiguous duration
56 * (i.e. previous S@starttime + @duration * (@repeat + 1)).
57 * */
58 int64_t starttime;
59 /* repeat: Element or Attribute Name
60 * specifies the repeat count of the number of following contiguous Segments with
61 * the same duration expressed by the value of @duration. This value is zero-based
62 * (e.g. a value of three means four Segments in the contiguous series).
63 * */
64 int64_t repeat;
65 /* duration: Element or Attribute Name
66 * specifies the Segment duration, in units of the value of the @timescale
67 * (a worked expansion example follows this struct definition). */
68 int64_t duration;
69 };
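
/*
 * Illustrative example (hypothetical values): a SegmentTimeline such as
 *
 *   <SegmentTimeline>
 *     <S t="0" d="5000" r="2"/>
 *     <S d="4000"/>
 *   </SegmentTimeline>
 *
 * expands to four segments starting at 0, 5000, 10000 and 15000
 * (all in @timescale units). @r is zero-based, so r="2" yields three
 * contiguous segments of duration 5000, and the second S element, having
 * no @t, starts where the previous run ends.
 */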
70
71 /*
72 * Each playlist has its own demuxer. If it is currently active,
73 * it has an opened AVIOContext too, and potentially an AVPacket
74 * containing the next packet from this stream.
75 */
76 struct representation {
77 char *url_template;
78 FFIOContext pb;
79 AVIOContext *input;
80 AVFormatContext *parent;
81 AVFormatContext *ctx;
82 int stream_index;
83
84 char *id;
85 char *lang;
86 int bandwidth;
87 AVRational framerate;
88 AVStream *assoc_stream; /* demuxer stream associated with this representation */
89
90 int n_fragments;
91 struct fragment **fragments; /* VOD list of fragments for the profile */
92
93 int n_timelines;
94 struct timeline **timelines;
95
96 int64_t first_seq_no;
97 int64_t last_seq_no;
98 int64_t start_number; /* used with a dynamic segment list to tell which segments are new ones */
99
100 int64_t fragment_duration;
101 int64_t fragment_timescale;
102
103 int64_t presentation_timeoffset;
104
105 int64_t cur_seq_no;
106 int64_t cur_seg_offset;
107 int64_t cur_seg_size;
108 struct fragment *cur_seg;
109
110 /* Currently active Media Initialization Section */
111 struct fragment *init_section;
112 uint8_t *init_sec_buf;
113 uint32_t init_sec_buf_size;
114 uint32_t init_sec_data_len;
115 uint32_t init_sec_buf_read_offset;
116 int64_t cur_timestamp;
117 int is_restart_needed;
118 };
119
120 typedef struct DASHContext {
121 const AVClass *class;
122 char *base_url;
123
124 int n_videos;
125 struct representation **videos;
126 int n_audios;
127 struct representation **audios;
128 int n_subtitles;
129 struct representation **subtitles;
130
131 /* MediaPresentationDescription Attribute */
132 uint64_t media_presentation_duration;
133 uint64_t suggested_presentation_delay;
134 uint64_t availability_start_time;
135 uint64_t availability_end_time;
136 uint64_t publish_time;
137 uint64_t minimum_update_period;
138 uint64_t time_shift_buffer_depth;
139 uint64_t min_buffer_time;
140
141 /* Period Attribute */
142 uint64_t period_duration;
143 uint64_t period_start;
144
145 /* AdaptationSet Attribute */
146 char *adaptionset_lang;
147
148 int is_live;
149 AVIOInterruptCB *interrupt_callback;
150 char *allowed_extensions;
151 AVDictionary *avio_opts;
152 int max_url_size;
153 char *cenc_decryption_key;
154
155 /* Flags for init section*/
156 int is_init_section_common_video;
157 int is_init_section_common_audio;
158 int is_init_section_common_subtitle;
159
160 } DASHContext;
161
162 static int ishttp(char *url)
163 {
164 const char *proto_name = avio_find_protocol_name(url);
165 return proto_name && av_strstart(proto_name, "http", NULL);
166 }
167
168 static int aligned(int val)
169 {
170 return ((val + 0x3F) >> 6) << 6;
171 }
172
173 static uint64_t get_current_time_in_sec(void)
174 {
175 return av_gettime() / 1000000;
176 }
177
178 static uint64_t get_utc_date_time_insec(AVFormatContext *s, const char *datetime)
179 {
180 struct tm timeinfo;
181 int year = 0;
182 int month = 0;
183 int day = 0;
184 int hour = 0;
185 int minute = 0;
186 int ret = 0;
187 float second = 0.0;
188
189 /* ISO-8601 date parser */
190 if (!datetime)
191 return 0;
192
193 ret = sscanf(datetime, "%d-%d-%dT%d:%d:%fZ", &year, &month, &day, &hour, &minute, &second);
194 /* expect all six fields: year, month, day, hour, minute, second */
195 if (ret != 6) {
196 av_log(s, AV_LOG_WARNING, "get_utc_date_time_insec get a wrong time format\n");
197 }
198 timeinfo.tm_year = year - 1900;
199 timeinfo.tm_mon = month - 1;
200 timeinfo.tm_mday = day;
201 timeinfo.tm_hour = hour;
202 timeinfo.tm_min = minute;
203 timeinfo.tm_sec = (int)second;
204
205 return av_timegm(&timeinfo);
206 }
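
/*
 * Example (illustrative): get_utc_date_time_insec(s, "2018-05-01T12:30:00Z")
 * parses the six fields with sscanf() and converts them with av_timegm(),
 * returning 1525177800, the Unix time of that UTC instant.
 * Fractional seconds are truncated.
 */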
207
208 static uint32_t get_duration_insec(AVFormatContext *s, const char *duration)
209 {
210 /* ISO-8601 duration parser */
211 uint32_t days = 0;
212 uint32_t hours = 0;
213 uint32_t mins = 0;
214 uint32_t secs = 0;
215 int size = 0;
216 float value = 0;
217 char type = '\0';
218 const char *ptr = duration;
219
220 while (*ptr) {
221 if (*ptr == 'P' || *ptr == 'T') {
222 ptr++;
223 continue;
224 }
225
226 if (sscanf(ptr, "%f%c%n", &value, &type, &size) != 2) {
227 av_log(s, AV_LOG_WARNING, "get_duration_insec get a wrong time format\n");
228 return 0; /* parser error */
229 }
230 switch (type) {
231 case 'D':
232 days = (uint32_t)value;
233 break;
234 case 'H':
235 hours = (uint32_t)value;
236 break;
237 case 'M':
238 mins = (uint32_t)value;
239 break;
240 case 'S':
241 secs = (uint32_t)value;
242 break;
243 default:
244 // handle invalid type
245 break;
246 }
247 ptr += size;
248 }
249 return ((days * 24 + hours) * 60 + mins) * 60 + secs;
250 }
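
/*
 * Example (illustrative): get_duration_insec(s, "PT2H30M15.5S") reads the
 * value/designator pairs 2H, 30M and 15.5S (fractions are truncated) and
 * returns ((0 * 24 + 2) * 60 + 30) * 60 + 15 = 9015 seconds.
 */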
251
252 static int64_t get_segment_start_time_based_on_timeline(struct representation *pls, int64_t cur_seq_no)
253 {
254 int64_t start_time = 0;
255 int64_t i = 0;
256 int64_t j = 0;
257 int64_t num = 0;
258
259 if (pls->n_timelines) {
260 for (i = 0; i < pls->n_timelines; i++) {
261 if (pls->timelines[i]->starttime > 0) {
262 start_time = pls->timelines[i]->starttime;
263 }
264 if (num == cur_seq_no)
265 goto finish;
266
267 start_time += pls->timelines[i]->duration;
268
269 if (pls->timelines[i]->repeat == -1) {
270 start_time = pls->timelines[i]->duration * cur_seq_no;
271 goto finish;
272 }
273
274 for (j = 0; j < pls->timelines[i]->repeat; j++) {
275 num++;
276 if (num == cur_seq_no)
277 goto finish;
278 start_time += pls->timelines[i]->duration;
279 }
280 num++;
281 }
282 }
283 finish:
284 return start_time;
285 }
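
/*
 * Worked example (hypothetical timeline): with two timeline entries
 * {t=0, d=5000, r=2} and {d=4000, r=0}, segments 0..2 start at
 * 0, 5000 and 10000, so a call with cur_seq_no == 3 walks past the first
 * run and returns 15000 (in @timescale units). An entry with repeat == -1
 * ("repeat until further notice") short-circuits to duration * cur_seq_no.
 */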
286
287 static int64_t calc_next_seg_no_from_timelines(struct representation *pls, int64_t cur_time)
288 {
289 int64_t i = 0;
290 int64_t j = 0;
291 int64_t num = 0;
292 int64_t start_time = 0;
293
294 for (i = 0; i < pls->n_timelines; i++) {
295 if (pls->timelines[i]->starttime > 0) {
296 start_time = pls->timelines[i]->starttime;
297 }
298 if (start_time > cur_time)
299 goto finish;
300
301 start_time += pls->timelines[i]->duration;
302 for (j = 0; j < pls->timelines[i]->repeat; j++) {
303 num++;
304 if (start_time > cur_time)
305 goto finish;
306 start_time += pls->timelines[i]->duration;
307 }
308 num++;
309 }
310
311 return -1;
312
313 finish:
314 return num;
315 }
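
/*
 * Example (same hypothetical timeline as above): with entries
 * {t=0, d=5000, r=2} and {d=4000, r=0}, a call with cur_time == 12000
 * returns 3, the index of the first segment whose start time (15000) lies
 * beyond cur_time; -1 means no segment in the timeline starts after cur_time.
 */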
316
317 static void free_fragment(struct fragment **seg)
318 {
319 if (!(*seg)) {
320 return;
321 }
322 av_freep(&(*seg)->url);
323 av_freep(seg);
324 }
325
326 static void free_fragment_list(struct representation *pls)
327 {
328 int i;
329
330 for (i = 0; i < pls->n_fragments; i++) {
331 free_fragment(&pls->fragments[i]);
332 }
333 av_freep(&pls->fragments);
334 pls->n_fragments = 0;
335 }
336
337 static void free_timelines_list(struct representation *pls)
338 {
339 int i;
340
341 for (i = 0; i < pls->n_timelines; i++) {
342 av_freep(&pls->timelines[i]);
343 }
344 av_freep(&pls->timelines);
345 pls->n_timelines = 0;
346 }
347
348 static void free_representation(struct representation *pls)
349 {
350 free_fragment_list(pls);
351 free_timelines_list(pls);
352 free_fragment(&pls->cur_seg);
353 free_fragment(&pls->init_section);
354 av_freep(&pls->init_sec_buf);
355 av_freep(&pls->pb.pub.buffer);
356 ff_format_io_close(pls->parent, &pls->input);
357 if (pls->ctx) {
358 pls->ctx->pb = NULL;
359 avformat_close_input(&pls->ctx);
360 }
361
362 av_freep(&pls->url_template);
363 av_freep(&pls->lang);
364 av_freep(&pls->id);
365 av_freep(&pls);
366 }
367
368 static void free_video_list(DASHContext *c)
369 {
370 int i;
371 for (i = 0; i < c->n_videos; i++) {
372 struct representation *pls = c->videos[i];
373 free_representation(pls);
374 }
375 av_freep(&c->videos);
376 c->n_videos = 0;
377 }
378
379 static void free_audio_list(DASHContext *c)
380 {
381 int i;
382 for (i = 0; i < c->n_audios; i++) {
383 struct representation *pls = c->audios[i];
384 free_representation(pls);
385 }
386 av_freep(&c->audios);
387 c->n_audios = 0;
388 }
389
390 static void free_subtitle_list(DASHContext *c)
391 {
392 int i;
393 for (i = 0; i < c->n_subtitles; i++) {
394 struct representation *pls = c->subtitles[i];
395 free_representation(pls);
396 }
397 av_freep(&c->subtitles);
398 c->n_subtitles = 0;
399 }
400
401 static int open_url(AVFormatContext *s, AVIOContext **pb, const char *url,
402 AVDictionary **opts, AVDictionary *opts2, int *is_http)
403 {
404 DASHContext *c = s->priv_data;
405 AVDictionary *tmp = NULL;
406 const char *proto_name = NULL;
407 int proto_name_len;
408 int ret;
409
410 if (av_strstart(url, "crypto", NULL)) {
411 if (url[6] == '+' || url[6] == ':')
412 proto_name = avio_find_protocol_name(url + 7);
413 }
414
415 if (!proto_name)
416 proto_name = avio_find_protocol_name(url);
417
418 if (!proto_name)
419 return AVERROR_INVALIDDATA;
420
421 proto_name_len = strlen(proto_name);
422 // only http(s) & file are allowed
423 if (av_strstart(proto_name, "file", NULL)) {
424 if (strcmp(c->allowed_extensions, "ALL") && !av_match_ext(url, c->allowed_extensions)) {
425 av_log(s, AV_LOG_ERROR,
426 "Filename extension of \'%s\' is not a common multimedia extension, blocked for security reasons.\n"
427 "If you wish to override this adjust allowed_extensions, you can set it to \'ALL\' to allow all\n",
428 url);
429 return AVERROR_INVALIDDATA;
430 }
431 } else if (av_strstart(proto_name, "http", NULL)) {
432 ;
433 } else
434 return AVERROR_INVALIDDATA;
435
436 if (!strncmp(proto_name, url, proto_name_len) && url[proto_name_len] == ':')
437 ;
438 else if (av_strstart(url, "crypto", NULL) && !strncmp(proto_name, url + 7, proto_name_len) && url[7 + proto_name_len] == ':')
439 ;
440 else if (strcmp(proto_name, "file") || !strncmp(url, "file,", 5))
441 return AVERROR_INVALIDDATA;
442
443 av_freep(pb);
444 av_dict_copy(&tmp, *opts, 0);
445 av_dict_copy(&tmp, opts2, 0);
446 ret = avio_open2(pb, url, AVIO_FLAG_READ, c->interrupt_callback, &tmp);
447 if (ret >= 0) {
448 // update cookies on http response with setcookies.
449 char *new_cookies = NULL;
450
451 if (!(s->flags & AVFMT_FLAG_CUSTOM_IO))
452 av_opt_get(*pb, "cookies", AV_OPT_SEARCH_CHILDREN, (uint8_t**)&new_cookies);
453
454 if (new_cookies) {
455 av_dict_set(opts, "cookies", new_cookies, AV_DICT_DONT_STRDUP_VAL);
456 }
457
458 }
459
460 av_dict_free(&tmp);
461
462 if (is_http)
463 *is_http = av_strstart(proto_name, "http", NULL);
464
465 return ret;
466 }
467
468 static char *get_content_url(xmlNodePtr *baseurl_nodes,
469 int n_baseurl_nodes,
470 int max_url_size,
471 char *rep_id_val,
472 char *rep_bandwidth_val,
473 char *val)
474 {
475 int i;
476 char *text;
477 char *url = NULL;
478 char *tmp_str = av_mallocz(max_url_size);
479
480 if (!tmp_str)
481 return NULL;
482
483 for (i = 0; i < n_baseurl_nodes; ++i) {
484 if (baseurl_nodes[i] &&
485 baseurl_nodes[i]->children &&
486 baseurl_nodes[i]->children->type == XML_TEXT_NODE) {
487 text = xmlNodeGetContent(baseurl_nodes[i]->children);
488 if (text) {
489 memset(tmp_str, 0, max_url_size);
490 ff_make_absolute_url(tmp_str, max_url_size, "", text);
491 xmlFree(text);
492 }
493 }
494 }
495
496 if (val)
497 ff_make_absolute_url(tmp_str, max_url_size, tmp_str, val);
498
499 if (rep_id_val) {
500 url = av_strireplace(tmp_str, "$RepresentationID$", rep_id_val);
501 if (!url) {
502 goto end;
503 }
504 av_strlcpy(tmp_str, url, max_url_size);
505 }
506 if (rep_bandwidth_val && tmp_str[0] != '\0') {
507 // free any previously assigned url before reassigning
508 av_free(url);
509 url = av_strireplace(tmp_str, "$Bandwidth$", rep_bandwidth_val);
510 if (!url) {
511 goto end;
512 }
513 }
514 end:
515 av_free(tmp_str);
516 return url;
517 }
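
/*
 * Example (hypothetical values): with a resolved BaseURL of
 * "http://example.com/dash/", val = "rep-$RepresentationID$-$Bandwidth$.mp4",
 * rep_id_val = "video1" and rep_bandwidth_val = "2000000", the result is
 * roughly "http://example.com/dash/rep-video1-2000000.mp4": the BaseURL and
 * val are joined with ff_make_absolute_url() and the
 * $RepresentationID$/$Bandwidth$ identifiers are substituted with
 * av_strireplace().
 */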
518
519 static char *get_val_from_nodes_tab(xmlNodePtr *nodes, const int n_nodes, const char *attrname)
520 {
521 int i;
522 char *val;
523
524 for (i = 0; i < n_nodes; ++i) {
525 if (nodes[i]) {
526 val = xmlGetProp(nodes[i], attrname);
527 if (val)
528 return val;
529 }
530 }
531
532 return NULL;
533 }
534
535 static xmlNodePtr find_child_node_by_name(xmlNodePtr rootnode, const char *nodename)
536 {
537 xmlNodePtr node = rootnode;
538 if (!node) {
539 return NULL;
540 }
541
542 node = xmlFirstElementChild(node);
543 while (node) {
544 if (!av_strcasecmp(node->name, nodename)) {
545 return node;
546 }
547 node = xmlNextElementSibling(node);
548 }
549 return NULL;
550 }
551
552 static enum AVMediaType get_content_type(xmlNodePtr node)
553 {
554 enum AVMediaType type = AVMEDIA_TYPE_UNKNOWN;
555 int i = 0;
556 const char *attr;
557 char *val = NULL;
558
559 if (node) {
560 for (i = 0; i < 2; i++) {
561 attr = i ? "mimeType" : "contentType";
562 val = xmlGetProp(node, attr);
563 if (val) {
564 if (av_stristr(val, "video")) {
565 type = AVMEDIA_TYPE_VIDEO;
566 } else if (av_stristr(val, "audio")) {
567 type = AVMEDIA_TYPE_AUDIO;
568 } else if (av_stristr(val, "text")) {
569 type = AVMEDIA_TYPE_SUBTITLE;
570 }
571 xmlFree(val);
572 }
573 }
574 }
575 return type;
576 }
577
578 static struct fragment * get_Fragment(char *range)
579 {
580 struct fragment * seg = av_mallocz(sizeof(struct fragment));
581
582 if (!seg)
583 return NULL;
584
585 seg->size = -1;
586 if (range) {
587 char *str_end_offset;
588 char *str_offset = av_strtok(range, "-", &str_end_offset);
589 seg->url_offset = strtoll(str_offset, NULL, 10);
590 seg->size = strtoll(str_end_offset, NULL, 10) - seg->url_offset + 1;
591 }
592
593 return seg;
594 }
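
/*
 * Example (illustrative): get_Fragment("863-7534") yields url_offset = 863
 * and size = 7534 - 863 + 1 = 6672, i.e. the inclusive byte range "863-7534"
 * of the enclosing file; a NULL range leaves size = -1, meaning the whole
 * resource.
 */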
595
596 static int parse_manifest_segmenturlnode(AVFormatContext *s, struct representation *rep,
597 xmlNodePtr fragmenturl_node,
598 xmlNodePtr *baseurl_nodes,
599 char *rep_id_val,
600 char *rep_bandwidth_val)
601 {
602 DASHContext *c = s->priv_data;
603 char *initialization_val = NULL;
604 char *media_val = NULL;
605 char *range_val = NULL;
606 int max_url_size = c ? c->max_url_size: MAX_URL_SIZE;
607 int err;
608
609 if (!av_strcasecmp(fragmenturl_node->name, "Initialization")) {
610 initialization_val = xmlGetProp(fragmenturl_node, "sourceURL");
611 range_val = xmlGetProp(fragmenturl_node, "range");
612 if (initialization_val || range_val) {
613 free_fragment(&rep->init_section);
614 rep->init_section = get_Fragment(range_val);
615 xmlFree(range_val);
616 if (!rep->init_section) {
617 xmlFree(initialization_val);
618 return AVERROR(ENOMEM);
619 }
620 rep->init_section->url = get_content_url(baseurl_nodes, 4,
621 max_url_size,
622 rep_id_val,
623 rep_bandwidth_val,
624 initialization_val);
625 xmlFree(initialization_val);
626 if (!rep->init_section->url) {
627 av_freep(&rep->init_section);
628 return AVERROR(ENOMEM);
629 }
630 }
631 } else if (!av_strcasecmp(fragmenturl_node->name, "SegmentURL")) {
632 media_val = xmlGetProp(fragmenturl_node, "media");
633 range_val = xmlGetProp(fragmenturl_node, "mediaRange");
634 if (media_val || range_val) {
635 struct fragment *seg = get_Fragment(range_val);
636 xmlFree(range_val);
637 if (!seg) {
638 xmlFree(media_val);
639 return AVERROR(ENOMEM);
640 }
641 seg->url = get_content_url(baseurl_nodes, 4,
642 max_url_size,
643 rep_id_val,
644 rep_bandwidth_val,
645 media_val);
646 xmlFree(media_val);
647 if (!seg->url) {
648 av_free(seg);
649 return AVERROR(ENOMEM);
650 }
651 err = av_dynarray_add_nofree(&rep->fragments, &rep->n_fragments, seg);
652 if (err < 0) {
653 free_fragment(&seg);
654 return err;
655 }
656 }
657 }
658
659 return 0;
660 }
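
/*
 * Illustrative input (hypothetical MPD excerpt): this function is called for
 * each child of a SegmentList such as
 *
 *   <SegmentList timescale="90000" duration="450000">
 *     <Initialization sourceURL="init.mp4" range="0-862"/>
 *     <SegmentURL media="seg-1.m4s"/>
 *     <SegmentURL mediaRange="863-7534"/>
 *   </SegmentList>
 *
 * The Initialization child becomes rep->init_section, and every SegmentURL
 * child is appended to rep->fragments with its URL and optional byte range.
 */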
661
662 static int parse_manifest_segmenttimeline(AVFormatContext *s, struct representation *rep,
663 xmlNodePtr fragment_timeline_node)
664 {
665 xmlAttrPtr attr = NULL;
666 char *val = NULL;
667 int err;
668
669 if (!av_strcasecmp(fragment_timeline_node->name, "S")) {
670 struct timeline *tml = av_mallocz(sizeof(struct timeline));
671 if (!tml) {
672 return AVERROR(ENOMEM);
673 }
674 attr = fragment_timeline_node->properties;
675 while (attr) {
676 val = xmlGetProp(fragment_timeline_node, attr->name);
677
678 if (!val) {
679 av_log(s, AV_LOG_WARNING, "parse_manifest_segmenttimeline attr->name = %s val is NULL\n", attr->name);
attr = attr->next; /* advance to the next attribute; continuing without this would loop forever */
680 continue;
681 }
682
683 if (!av_strcasecmp(attr->name, "t")) {
684 tml->starttime = (int64_t)strtoll(val, NULL, 10);
685 } else if (!av_strcasecmp(attr->name, "r")) {
686 tml->repeat =(int64_t) strtoll(val, NULL, 10);
687 } else if (!av_strcasecmp(attr->name, "d")) {
688 tml->duration = (int64_t)strtoll(val, NULL, 10);
689 }
690 attr = attr->next;
691 xmlFree(val);
692 }
693 err = av_dynarray_add_nofree(&rep->timelines, &rep->n_timelines, tml);
694 if (err < 0) {
695 av_free(tml);
696 return err;
697 }
698 }
699
700 return 0;
701 }
702
703 static int resolve_content_path(AVFormatContext *s, const char *url, int *max_url_size, xmlNodePtr *baseurl_nodes, int n_baseurl_nodes)
704 {
705 char *tmp_str = NULL;
706 char *path = NULL;
707 char *mpdName = NULL;
708 xmlNodePtr node = NULL;
709 char *baseurl = NULL;
710 char *root_url = NULL;
711 char *text = NULL;
712 char *tmp = NULL;
713 int isRootHttp = 0;
714 char token ='/';
715 int start = 0;
716 int rootId = 0;
717 int updated = 0;
718 int size = 0;
719 int i;
720 int tmp_max_url_size = strlen(url);
721
722 for (i = n_baseurl_nodes-1; i >= 0 ; i--) {
723 text = xmlNodeGetContent(baseurl_nodes[i]);
724 if (!text)
725 continue;
726 tmp_max_url_size += strlen(text);
727 if (ishttp(text)) {
728 xmlFree(text);
729 break;
730 }
731 xmlFree(text);
732 }
733
734 tmp_max_url_size = aligned(tmp_max_url_size);
735 text = av_mallocz(tmp_max_url_size);
736 if (!text) {
737 updated = AVERROR(ENOMEM);
738 goto end;
739 }
740 av_strlcpy(text, url, strlen(url)+1);
741 tmp = text;
742 while (mpdName = av_strtok(tmp, "/", &tmp)) {
743 size = strlen(mpdName);
744 }
745 av_free(text);
746
747 path = av_mallocz(tmp_max_url_size);
748 tmp_str = av_mallocz(tmp_max_url_size);
749 if (!tmp_str || !path) {
750 updated = AVERROR(ENOMEM);
751 goto end;
752 }
753
754 av_strlcpy (path, url, strlen(url) - size + 1);
755 for (rootId = n_baseurl_nodes - 1; rootId > 0; rootId --) {
756 if (!(node = baseurl_nodes[rootId])) {
757 continue;
758 }
759 text = xmlNodeGetContent(node);
760 if (ishttp(text)) {
761 xmlFree(text);
762 break;
763 }
764 xmlFree(text);
765 }
766
767 node = baseurl_nodes[rootId];
768 baseurl = xmlNodeGetContent(node);
769 root_url = (av_strcasecmp(baseurl, "")) ? baseurl : path;
770 if (node) {
771 xmlNodeSetContent(node, root_url);
772 updated = 1;
773 }
774
775 size = strlen(root_url);
776 isRootHttp = ishttp(root_url);
777
778 if (size > 0 && root_url[size - 1] != token) {
779 av_strlcat(root_url, "/", size + 2);
780 size += 2;
781 }
782
783 for (i = 0; i < n_baseurl_nodes; ++i) {
784 if (i == rootId) {
785 continue;
786 }
787 text = xmlNodeGetContent(baseurl_nodes[i]);
788 if (text && !av_strstart(text, "/", NULL)) {
789 memset(tmp_str, 0, strlen(tmp_str));
790 if (!ishttp(text) && isRootHttp) {
791 av_strlcpy(tmp_str, root_url, size + 1);
792 }
793 start = (text[0] == token);
794 if (start && av_stristr(tmp_str, text)) {
795 char *p = tmp_str;
796 if (!av_strncasecmp(tmp_str, "http://", 7)) {
797 p += 7;
798 } else if (!av_strncasecmp(tmp_str, "https://", 8)) {
799 p += 8;
800 }
801 p = strchr(p, '/');
802 memset(p + 1, 0, strlen(p));
803 }
804 av_strlcat(tmp_str, text + start, tmp_max_url_size);
805 xmlNodeSetContent(baseurl_nodes[i], tmp_str);
806 updated = 1;
807 xmlFree(text);
808 }
809 }
810
811 end:
812 if (tmp_max_url_size > *max_url_size) {
813 *max_url_size = tmp_max_url_size;
814 }
815 av_free(path);
816 av_free(tmp_str);
817 xmlFree(baseurl);
818 return updated;
819
820 }
821
822 static int parse_manifest_representation(AVFormatContext *s, const char *url,
823 xmlNodePtr node,
824 xmlNodePtr adaptionset_node,
825 xmlNodePtr mpd_baseurl_node,
826 xmlNodePtr period_baseurl_node,
827 xmlNodePtr period_segmenttemplate_node,
828 xmlNodePtr period_segmentlist_node,
829 xmlNodePtr fragment_template_node,
830 xmlNodePtr content_component_node,
831 xmlNodePtr adaptionset_baseurl_node,
832 xmlNodePtr adaptionset_segmentlist_node,
833 xmlNodePtr adaptionset_supplementalproperty_node)
834 {
835 int32_t ret = 0;
836 DASHContext *c = s->priv_data;
837 struct representation *rep = NULL;
838 struct fragment *seg = NULL;
839 xmlNodePtr representation_segmenttemplate_node = NULL;
840 xmlNodePtr representation_baseurl_node = NULL;
841 xmlNodePtr representation_segmentlist_node = NULL;
842 xmlNodePtr segmentlists_tab[3];
843 xmlNodePtr fragment_timeline_node = NULL;
844 xmlNodePtr fragment_templates_tab[5];
845 char *val = NULL;
846 xmlNodePtr baseurl_nodes[4];
847 xmlNodePtr representation_node = node;
848 char *rep_bandwidth_val;
849 enum AVMediaType type = AVMEDIA_TYPE_UNKNOWN;
850
851 // try to get information from the Representation
852 if (type == AVMEDIA_TYPE_UNKNOWN)
853 type = get_content_type(representation_node);
854 // try to get information from the ContentComponent
855 if (type == AVMEDIA_TYPE_UNKNOWN)
856 type = get_content_type(content_component_node);
857 // try to get information from the AdaptationSet
858 if (type == AVMEDIA_TYPE_UNKNOWN)
859 type = get_content_type(adaptionset_node);
860 if (type != AVMEDIA_TYPE_VIDEO && type != AVMEDIA_TYPE_AUDIO &&
861 type != AVMEDIA_TYPE_SUBTITLE) {
862 av_log(s, AV_LOG_VERBOSE, "Parsing '%s' - skipping unsupported representation type\n", url);
863 return 0;
864 }
865
866 // convert selected representation to our internal struct
867 rep = av_mallocz(sizeof(struct representation));
868 if (!rep)
869 return AVERROR(ENOMEM);
870 if (c->adaptionset_lang) {
871 rep->lang = av_strdup(c->adaptionset_lang);
872 if (!rep->lang) {
873 av_log(s, AV_LOG_ERROR, "alloc language memory failure\n");
874 av_freep(&rep);
875 return AVERROR(ENOMEM);
876 }
877 }
878 rep->parent = s;
879 representation_segmenttemplate_node = find_child_node_by_name(representation_node, "SegmentTemplate");
880 representation_baseurl_node = find_child_node_by_name(representation_node, "BaseURL");
881 representation_segmentlist_node = find_child_node_by_name(representation_node, "SegmentList");
882 rep_bandwidth_val = xmlGetProp(representation_node, "bandwidth");
883 val = xmlGetProp(representation_node, "id");
884 if (val) {
885 rep->id = av_strdup(val);
886 xmlFree(val);
887 if (!rep->id)
888 goto enomem;
889 }
890
891 baseurl_nodes[0] = mpd_baseurl_node;
892 baseurl_nodes[1] = period_baseurl_node;
893 baseurl_nodes[2] = adaptionset_baseurl_node;
894 baseurl_nodes[3] = representation_baseurl_node;
895
896 ret = resolve_content_path(s, url, &c->max_url_size, baseurl_nodes, 4);
897 c->max_url_size = aligned(c->max_url_size
898 + (rep->id ? strlen(rep->id) : 0)
899 + (rep_bandwidth_val ? strlen(rep_bandwidth_val) : 0));
900 if (ret == AVERROR(ENOMEM) || ret == 0)
901 goto free;
902 if (representation_segmenttemplate_node || fragment_template_node || period_segmenttemplate_node) {
903 fragment_timeline_node = NULL;
904 fragment_templates_tab[0] = representation_segmenttemplate_node;
905 fragment_templates_tab[1] = adaptionset_segmentlist_node;
906 fragment_templates_tab[2] = fragment_template_node;
907 fragment_templates_tab[3] = period_segmenttemplate_node;
908 fragment_templates_tab[4] = period_segmentlist_node;
909
910 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "initialization");
911 if (val) {
912 rep->init_section = av_mallocz(sizeof(struct fragment));
913 if (!rep->init_section) {
914 xmlFree(val);
915 goto enomem;
916 }
917 c->max_url_size = aligned(c->max_url_size + strlen(val));
918 rep->init_section->url = get_content_url(baseurl_nodes, 4,
919 c->max_url_size, rep->id,
920 rep_bandwidth_val, val);
921 xmlFree(val);
922 if (!rep->init_section->url)
923 goto enomem;
924 rep->init_section->size = -1;
925 }
926 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "media");
927 if (val) {
928 c->max_url_size = aligned(c->max_url_size + strlen(val));
929 rep->url_template = get_content_url(baseurl_nodes, 4,
930 c->max_url_size, rep->id,
931 rep_bandwidth_val, val);
932 xmlFree(val);
933 }
934 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "presentationTimeOffset");
935 if (val) {
936 rep->presentation_timeoffset = (int64_t) strtoll(val, NULL, 10);
937 av_log(s, AV_LOG_TRACE, "rep->presentation_timeoffset = [%"PRId64"]\n", rep->presentation_timeoffset);
938 xmlFree(val);
939 }
940 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "duration");
941 if (val) {
942 rep->fragment_duration = (int64_t) strtoll(val, NULL, 10);
943 av_log(s, AV_LOG_TRACE, "rep->fragment_duration = [%"PRId64"]\n", rep->fragment_duration);
944 xmlFree(val);
945 }
946 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "timescale");
947 if (val) {
948 rep->fragment_timescale = (int64_t) strtoll(val, NULL, 10);
949 av_log(s, AV_LOG_TRACE, "rep->fragment_timescale = [%"PRId64"]\n", rep->fragment_timescale);
950 xmlFree(val);
951 }
952 val = get_val_from_nodes_tab(fragment_templates_tab, 4, "startNumber");
953 if (val) {
954 rep->start_number = rep->first_seq_no = (int64_t) strtoll(val, NULL, 10);
955 av_log(s, AV_LOG_TRACE, "rep->first_seq_no = [%"PRId64"]\n", rep->first_seq_no);
956 xmlFree(val);
957 }
958 if (adaptionset_supplementalproperty_node) {
959 char *scheme_id_uri = xmlGetProp(adaptionset_supplementalproperty_node, "schemeIdUri");
960 if (scheme_id_uri) {
961 int is_last_segment_number = !av_strcasecmp(scheme_id_uri, "http://dashif.org/guidelines/last-segment-number");
962 xmlFree(scheme_id_uri);
963 if (is_last_segment_number) {
964 val = xmlGetProp(adaptionset_supplementalproperty_node,"value");
965 if (!val) {
966 av_log(s, AV_LOG_ERROR, "Missing value attribute in adaptionset_supplementalproperty_node\n");
967 } else {
968 rep->last_seq_no =(int64_t) strtoll(val, NULL, 10) - 1;
969 xmlFree(val);
970 }
971 }
972 }
973 }
974
975 fragment_timeline_node = find_child_node_by_name(representation_segmenttemplate_node, "SegmentTimeline");
976
977 if (!fragment_timeline_node)
978 fragment_timeline_node = find_child_node_by_name(fragment_template_node, "SegmentTimeline");
979 if (!fragment_timeline_node)
980 fragment_timeline_node = find_child_node_by_name(adaptionset_segmentlist_node, "SegmentTimeline");
981 if (!fragment_timeline_node)
982 fragment_timeline_node = find_child_node_by_name(period_segmentlist_node, "SegmentTimeline");
983 if (fragment_timeline_node) {
984 fragment_timeline_node = xmlFirstElementChild(fragment_timeline_node);
985 while (fragment_timeline_node) {
986 ret = parse_manifest_segmenttimeline(s, rep, fragment_timeline_node);
987 if (ret < 0)
988 goto free;
989 fragment_timeline_node = xmlNextElementSibling(fragment_timeline_node);
990 }
991 }
992 } else if (representation_baseurl_node && !representation_segmentlist_node) {
993 seg = av_mallocz(sizeof(struct fragment));
994 if (!seg)
995 goto enomem;
996 ret = av_dynarray_add_nofree(&rep->fragments, &rep->n_fragments, seg);
997 if (ret < 0) {
998 av_free(seg);
999 goto free;
1000 }
1001 seg->url = get_content_url(baseurl_nodes, 4, c->max_url_size,
1002 rep->id, rep_bandwidth_val, NULL);
1003 if (!seg->url)
1004 goto enomem;
1005 seg->size = -1;
1006 } else if (representation_segmentlist_node) {
1007 // TODO: https://www.brendanlong.com/the-structure-of-an-mpeg-dash-mpd.html
1008 // http://www-itec.uni-klu.ac.at/dash/ddash/mpdGenerator.php?fragmentlength=15&type=full
1009 xmlNodePtr fragmenturl_node = NULL;
1010 segmentlists_tab[0] = representation_segmentlist_node;
1011 segmentlists_tab[1] = adaptionset_segmentlist_node;
1012 segmentlists_tab[2] = period_segmentlist_node;
1013
1014 val = get_val_from_nodes_tab(segmentlists_tab, 3, "duration");
1015 if (val) {
1016 rep->fragment_duration = (int64_t) strtoll(val, NULL, 10);
1017 av_log(s, AV_LOG_TRACE, "rep->fragment_duration = [%"PRId64"]\n", rep->fragment_duration);
1018 xmlFree(val);
1019 }
1020 val = get_val_from_nodes_tab(segmentlists_tab, 3, "timescale");
1021 if (val) {
1022 rep->fragment_timescale = (int64_t) strtoll(val, NULL, 10);
1023 av_log(s, AV_LOG_TRACE, "rep->fragment_timescale = [%"PRId64"]\n", rep->fragment_timescale);
1024 xmlFree(val);
1025 }
1026 val = get_val_from_nodes_tab(segmentlists_tab, 3, "startNumber");
1027 if (val) {
1028 rep->start_number = rep->first_seq_no = (int64_t) strtoll(val, NULL, 10);
1029 av_log(s, AV_LOG_TRACE, "rep->first_seq_no = [%"PRId64"]\n", rep->first_seq_no);
1030 xmlFree(val);
1031 }
1032
1033 fragmenturl_node = xmlFirstElementChild(representation_segmentlist_node);
1034 while (fragmenturl_node) {
1035 ret = parse_manifest_segmenturlnode(s, rep, fragmenturl_node,
1036 baseurl_nodes, rep->id,
1037 rep_bandwidth_val);
1038 if (ret < 0)
1039 goto free;
1040 fragmenturl_node = xmlNextElementSibling(fragmenturl_node);
1041 }
1042
1043 fragment_timeline_node = find_child_node_by_name(adaptionset_segmentlist_node, "SegmentTimeline");
1044 if (!fragment_timeline_node)
1045 fragment_timeline_node = find_child_node_by_name(period_segmentlist_node, "SegmentTimeline");
1046 if (fragment_timeline_node) {
1047 fragment_timeline_node = xmlFirstElementChild(fragment_timeline_node);
1048 while (fragment_timeline_node) {
1049 ret = parse_manifest_segmenttimeline(s, rep, fragment_timeline_node);
1050 if (ret < 0)
1051 goto free;
1052 fragment_timeline_node = xmlNextElementSibling(fragment_timeline_node);
1053 }
1054 }
1055 } else {
1056 av_log(s, AV_LOG_ERROR, "Unknown format of Representation node id '%s' \n",
1057 rep->id ? rep->id : "");
1058 goto free;
1059 }
1060
1061 if (rep->fragment_duration > 0 && !rep->fragment_timescale)
1062 rep->fragment_timescale = 1;
1063 rep->bandwidth = rep_bandwidth_val ? atoi(rep_bandwidth_val) : 0;
1064 rep->framerate = av_make_q(0, 0);
1065 if (type == AVMEDIA_TYPE_VIDEO) {
1066 char *rep_framerate_val = xmlGetProp(representation_node, "frameRate");
1067 if (rep_framerate_val) {
1068 ret = av_parse_video_rate(&rep->framerate, rep_framerate_val);
1069 if (ret < 0)
1070 av_log(s, AV_LOG_VERBOSE, "Ignoring invalid frame rate '%s'\n", rep_framerate_val);
1071 xmlFree(rep_framerate_val);
1072 }
1073 }
1074
1075 switch (type) {
1076 case AVMEDIA_TYPE_VIDEO:
1077 ret = av_dynarray_add_nofree(&c->videos, &c->n_videos, rep);
1078 break;
1079 case AVMEDIA_TYPE_AUDIO:
1080 ret = av_dynarray_add_nofree(&c->audios, &c->n_audios, rep);
1081 break;
1082 case AVMEDIA_TYPE_SUBTITLE:
1083 ret = av_dynarray_add_nofree(&c->subtitles, &c->n_subtitles, rep);
1084 break;
1085 }
1086 if (ret < 0)
1087 goto free;
1088
1089 end:
1090 if (rep_bandwidth_val)
1091 xmlFree(rep_bandwidth_val);
1092
1093 return ret;
1094 enomem:
1095 ret = AVERROR(ENOMEM);
1096 free:
1097 free_representation(rep);
1098 goto end;
1099 }
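
/*
 * Illustrative input (hypothetical MPD excerpt): the three layouts handled
 * above correspond to Representations like
 *
 *   <Representation id="video1" bandwidth="2000000" frameRate="25">
 *     <SegmentTemplate timescale="90000" duration="450000" startNumber="1"
 *                      initialization="init-$RepresentationID$.mp4"
 *                      media="chunk-$RepresentationID$-$Number$.m4s"/>
 *   </Representation>
 *
 * (template-based), a Representation whose only child is a BaseURL
 * (a single fragment covering the whole file), or one carrying an explicit
 * SegmentList of SegmentURL children.
 */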
1100
1101 static int parse_manifest_adaptationset_attr(AVFormatContext *s, xmlNodePtr adaptionset_node)
1102 {
1103 DASHContext *c = s->priv_data;
1104
1105 if (!adaptionset_node) {
1106 av_log(s, AV_LOG_WARNING, "Cannot get AdaptionSet\n");
1107 return AVERROR(EINVAL);
1108 }
1109 c->adaptionset_lang = xmlGetProp(adaptionset_node, "lang");
1110
1111 return 0;
1112 }
1113
1114 static int parse_manifest_adaptationset(AVFormatContext *s, const char *url,
1115 xmlNodePtr adaptionset_node,
1116 xmlNodePtr mpd_baseurl_node,
1117 xmlNodePtr period_baseurl_node,
1118 xmlNodePtr period_segmenttemplate_node,
1119 xmlNodePtr period_segmentlist_node)
1120 {
1121 int ret = 0;
1122 DASHContext *c = s->priv_data;
1123 xmlNodePtr fragment_template_node = NULL;
1124 xmlNodePtr content_component_node = NULL;
1125 xmlNodePtr adaptionset_baseurl_node = NULL;
1126 xmlNodePtr adaptionset_segmentlist_node = NULL;
1127 xmlNodePtr adaptionset_supplementalproperty_node = NULL;
1128 xmlNodePtr node = NULL;
1129
1130 ret = parse_manifest_adaptationset_attr(s, adaptionset_node);
1131 if (ret < 0)
1132 return ret;
1133
1134 node = xmlFirstElementChild(adaptionset_node);
1135 while (node) {
1136 if (!av_strcasecmp(node->name, "SegmentTemplate")) {
1137 fragment_template_node = node;
1138 } else if (!av_strcasecmp(node->name, "ContentComponent")) {
1139 content_component_node = node;
1140 } else if (!av_strcasecmp(node->name, "BaseURL")) {
1141 adaptionset_baseurl_node = node;
1142 } else if (!av_strcasecmp(node->name, "SegmentList")) {
1143 adaptionset_segmentlist_node = node;
1144 } else if (!av_strcasecmp(node->name, "SupplementalProperty")) {
1145 adaptionset_supplementalproperty_node = node;
1146 } else if (!av_strcasecmp(node->name, "Representation")) {
1147 ret = parse_manifest_representation(s, url, node,
1148 adaptionset_node,
1149 mpd_baseurl_node,
1150 period_baseurl_node,
1151 period_segmenttemplate_node,
1152 period_segmentlist_node,
1153 fragment_template_node,
1154 content_component_node,
1155 adaptionset_baseurl_node,
1156 adaptionset_segmentlist_node,
1157 adaptionset_supplementalproperty_node);
1158 if (ret < 0)
1159 goto err;
1160 }
1161 node = xmlNextElementSibling(node);
1162 }
1163
1164 err:
1165 xmlFree(c->adaptionset_lang);
1166 c->adaptionset_lang = NULL;
1167 return ret;
1168 }
1169
1170 static int parse_programinformation(AVFormatContext *s, xmlNodePtr node)
1171 {
1172 xmlChar *val = NULL;
1173
1174 node = xmlFirstElementChild(node);
1175 while (node) {
1176 if (!av_strcasecmp(node->name, "Title")) {
1177 val = xmlNodeGetContent(node);
1178 if (val) {
1179 av_dict_set(&s->metadata, "Title", val, 0);
1180 }
1181 } else if (!av_strcasecmp(node->name, "Source")) {
1182 val = xmlNodeGetContent(node);
1183 if (val) {
1184 av_dict_set(&s->metadata, "Source", val, 0);
1185 }
1186 } else if (!av_strcasecmp(node->name, "Copyright")) {
1187 val = xmlNodeGetContent(node);
1188 if (val) {
1189 av_dict_set(&s->metadata, "Copyright", val, 0);
1190 }
1191 }
1192 node = xmlNextElementSibling(node);
1193 xmlFree(val);
1194 val = NULL;
1195 }
1196 return 0;
1197 }
1198
1199 static int parse_manifest(AVFormatContext *s, const char *url, AVIOContext *in)
1200 {
1201 DASHContext *c = s->priv_data;
1202 int ret = 0;
1203 int close_in = 0;
1204 AVBPrint buf;
1205 AVDictionary *opts = NULL;
1206 xmlDoc *doc = NULL;
1207 xmlNodePtr root_element = NULL;
1208 xmlNodePtr node = NULL;
1209 xmlNodePtr period_node = NULL;
1210 xmlNodePtr tmp_node = NULL;
1211 xmlNodePtr mpd_baseurl_node = NULL;
1212 xmlNodePtr period_baseurl_node = NULL;
1213 xmlNodePtr period_segmenttemplate_node = NULL;
1214 xmlNodePtr period_segmentlist_node = NULL;
1215 xmlNodePtr adaptionset_node = NULL;
1216 xmlAttrPtr attr = NULL;
1217 char *val = NULL;
1218 uint32_t period_duration_sec = 0;
1219 uint32_t period_start_sec = 0;
1220
1221 if (!in) {
1222 close_in = 1;
1223
1224 av_dict_copy(&opts, c->avio_opts, 0);
1225 ret = avio_open2(&in, url, AVIO_FLAG_READ, c->interrupt_callback, &opts);
1226 av_dict_free(&opts);
1227 if (ret < 0)
1228 return ret;
1229 }
1230
1231 if (av_opt_get(in, "location", AV_OPT_SEARCH_CHILDREN, (uint8_t**)&c->base_url) < 0)
1232 c->base_url = av_strdup(url);
1233
1234 av_bprint_init(&buf, 0, INT_MAX); // xmlReadMemory uses integer bufsize
1235
1236 if ((ret = avio_read_to_bprint(in, &buf, SIZE_MAX)) < 0 ||
1237 !avio_feof(in)) {
1238 av_log(s, AV_LOG_ERROR, "Unable to read manifest '%s'\n", url);
1239 if (ret == 0)
1240 ret = AVERROR_INVALIDDATA;
1241 } else {
1242 LIBXML_TEST_VERSION
1243
1244 doc = xmlReadMemory(buf.str, buf.len, c->base_url, NULL, 0);
1245 root_element = xmlDocGetRootElement(doc);
1246 node = root_element;
1247
1248 if (!node) {
1249 ret = AVERROR_INVALIDDATA;
1250 av_log(s, AV_LOG_ERROR, "Unable to parse '%s' - missing root node\n", url);
1251 goto cleanup;
1252 }
1253
1254 if (node->type != XML_ELEMENT_NODE ||
1255 av_strcasecmp(node->name, "MPD")) {
1256 ret = AVERROR_INVALIDDATA;
1257 av_log(s, AV_LOG_ERROR, "Unable to parse '%s' - wrong root node name[%s] type[%d]\n", url, node->name, (int)node->type);
1258 goto cleanup;
1259 }
1260
1261 val = xmlGetProp(node, "type");
1262 if (!val) {
1263 av_log(s, AV_LOG_ERROR, "Unable to parse '%s' - missing type attrib\n", url);
1264 ret = AVERROR_INVALIDDATA;
1265 goto cleanup;
1266 }
1267 if (!av_strcasecmp(val, "dynamic"))
1268 c->is_live = 1;
1269 xmlFree(val);
1270
1271 attr = node->properties;
1272 while (attr) {
1273 val = xmlGetProp(node, attr->name);
1274
1275 if (!av_strcasecmp(attr->name, "availabilityStartTime")) {
1276 c->availability_start_time = get_utc_date_time_insec(s, val);
1277 av_log(s, AV_LOG_TRACE, "c->availability_start_time = [%"PRId64"]\n", c->availability_start_time);
1278 } else if (!av_strcasecmp(attr->name, "availabilityEndTime")) {
1279 c->availability_end_time = get_utc_date_time_insec(s, val);
1280 av_log(s, AV_LOG_TRACE, "c->availability_end_time = [%"PRId64"]\n", c->availability_end_time);
1281 } else if (!av_strcasecmp(attr->name, "publishTime")) {
1282 c->publish_time = get_utc_date_time_insec(s, val);
1283 av_log(s, AV_LOG_TRACE, "c->publish_time = [%"PRId64"]\n", c->publish_time);
1284 } else if (!av_strcasecmp(attr->name, "minimumUpdatePeriod")) {
1285 c->minimum_update_period = get_duration_insec(s, val);
1286 av_log(s, AV_LOG_TRACE, "c->minimum_update_period = [%"PRId64"]\n", c->minimum_update_period);
1287 } else if (!av_strcasecmp(attr->name, "timeShiftBufferDepth")) {
1288 c->time_shift_buffer_depth = get_duration_insec(s, val);
1289 av_log(s, AV_LOG_TRACE, "c->time_shift_buffer_depth = [%"PRId64"]\n", c->time_shift_buffer_depth);
1290 } else if (!av_strcasecmp(attr->name, "minBufferTime")) {
1291 c->min_buffer_time = get_duration_insec(s, val);
1292 av_log(s, AV_LOG_TRACE, "c->min_buffer_time = [%"PRId64"]\n", c->min_buffer_time);
1293 } else if (!av_strcasecmp(attr->name, "suggestedPresentationDelay")) {
1294 c->suggested_presentation_delay = get_duration_insec(s, val);
1295 av_log(s, AV_LOG_TRACE, "c->suggested_presentation_delay = [%"PRId64"]\n", c->suggested_presentation_delay);
1296 } else if (!av_strcasecmp(attr->name, "mediaPresentationDuration")) {
1297 c->media_presentation_duration = get_duration_insec(s, val);
1298 av_log(s, AV_LOG_TRACE, "c->media_presentation_duration = [%"PRId64"]\n", c->media_presentation_duration);
1299 }
1300 attr = attr->next;
1301 xmlFree(val);
1302 }
1303
1304 tmp_node = find_child_node_by_name(node, "BaseURL");
1305 if (tmp_node) {
1306 mpd_baseurl_node = xmlCopyNode(tmp_node,1);
1307 } else {
1308 mpd_baseurl_node = xmlNewNode(NULL, "BaseURL");
1309 }
1310
1311 // for now we can handle only one Period, the one with the longest duration
1312 node = xmlFirstElementChild(node);
1313 while (node) {
1314 if (!av_strcasecmp(node->name, "Period")) {
1315 period_duration_sec = 0;
1316 period_start_sec = 0;
1317 attr = node->properties;
1318 while (attr) {
1319 val = xmlGetProp(node, attr->name);
1320 if (!av_strcasecmp(attr->name, "duration")) {
1321 period_duration_sec = get_duration_insec(s, val);
1322 } else if (!av_strcasecmp(attr->name, "start")) {
1323 period_start_sec = get_duration_insec(s, val);
1324 }
1325 attr = attr->next;
1326 xmlFree(val);
1327 }
1328 if ((period_duration_sec) >= (c->period_duration)) {
1329 period_node = node;
1330 c->period_duration = period_duration_sec;
1331 c->period_start = period_start_sec;
1332 if (c->period_start > 0)
1333 c->media_presentation_duration = c->period_duration;
1334 }
1335 } else if (!av_strcasecmp(node->name, "ProgramInformation")) {
1336 parse_programinformation(s, node);
1337 }
1338 node = xmlNextElementSibling(node);
1339 }
1340 if (!period_node) {
1341 av_log(s, AV_LOG_ERROR, "Unable to parse '%s' - missing Period node\n", url);
1342 ret = AVERROR_INVALIDDATA;
1343 goto cleanup;
1344 }
1345
1346 adaptionset_node = xmlFirstElementChild(period_node);
1347 while (adaptionset_node) {
1348 if (!av_strcasecmp(adaptionset_node->name, "BaseURL")) {
1349 period_baseurl_node = adaptionset_node;
1350 } else if (!av_strcasecmp(adaptionset_node->name, "SegmentTemplate")) {
1351 period_segmenttemplate_node = adaptionset_node;
1352 } else if (!av_strcasecmp(adaptionset_node->name, "SegmentList")) {
1353 period_segmentlist_node = adaptionset_node;
1354 } else if (!av_strcasecmp(adaptionset_node->name, "AdaptationSet")) {
1355 parse_manifest_adaptationset(s, url, adaptionset_node, mpd_baseurl_node, period_baseurl_node, period_segmenttemplate_node, period_segmentlist_node);
1356 }
1357 adaptionset_node = xmlNextElementSibling(adaptionset_node);
1358 }
1359 cleanup:
1360 /* free the document */
1361 xmlFreeDoc(doc);
1362 xmlCleanupParser();
1363 xmlFreeNode(mpd_baseurl_node);
1364 }
1365
1366 av_bprint_finalize(&buf, NULL);
1367 if (close_in) {
1368 avio_close(in);
1369 }
1370 return ret;
1371 }
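
/*
 * Illustrative input (hypothetical, heavily trimmed): parse_manifest()
 * expects an MPD shaped roughly like
 *
 *   <MPD type="static" mediaPresentationDuration="PT10M">
 *     <Period duration="PT10M">
 *       <AdaptationSet contentType="video">
 *         <Representation id="v0" bandwidth="1000000"> ... </Representation>
 *       </AdaptationSet>
 *     </Period>
 *   </MPD>
 *
 * type="dynamic" marks a live stream; only the longest Period is kept and
 * its AdaptationSets are handed to parse_manifest_adaptationset().
 */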
1372
1373 static int64_t calc_cur_seg_no(AVFormatContext *s, struct representation *pls)
1374 {
1375 DASHContext *c = s->priv_data;
1376 int64_t num = 0;
1377 int64_t start_time_offset = 0;
1378
1379 if (c->is_live) {
1380 if (pls->n_fragments) {
1381 av_log(s, AV_LOG_TRACE, "in n_fragments mode\n");
1382 num = pls->first_seq_no;
1383 } else if (pls->n_timelines) {
1384 av_log(s, AV_LOG_TRACE, "in n_timelines mode\n");
1385 start_time_offset = get_segment_start_time_based_on_timeline(pls, 0xFFFFFFFF) - 60 * pls->fragment_timescale; // 60 seconds before end
1386 num = calc_next_seg_no_from_timelines(pls, start_time_offset);
1387 if (num == -1)
1388 num = pls->first_seq_no;
1389 else
1390 num += pls->first_seq_no;
1391 } else if (pls->fragment_duration){
1392 av_log(s, AV_LOG_TRACE, "in fragment_duration mode fragment_timescale = %"PRId64", presentation_timeoffset = %"PRId64"\n", pls->fragment_timescale, pls->presentation_timeoffset);
1393 if (pls->presentation_timeoffset) {
1394 num = pls->first_seq_no + (((get_current_time_in_sec() - c->availability_start_time) * pls->fragment_timescale)-pls->presentation_timeoffset) / pls->fragment_duration - c->min_buffer_time;
1395 } else if (c->publish_time > 0 && !c->availability_start_time) {
1396 if (c->min_buffer_time) {
1397 num = pls->first_seq_no + (((c->publish_time + pls->fragment_duration) - c->suggested_presentation_delay) * pls->fragment_timescale) / pls->fragment_duration - c->min_buffer_time;
1398 } else {
1399 num = pls->first_seq_no + (((c->publish_time - c->time_shift_buffer_depth + pls->fragment_duration) - c->suggested_presentation_delay) * pls->fragment_timescale) / pls->fragment_duration;
1400 }
1401 } else {
1402 num = pls->first_seq_no + (((get_current_time_in_sec() - c->availability_start_time) - c->suggested_presentation_delay) * pls->fragment_timescale) / pls->fragment_duration;
1403 }
1404 }
1405 } else {
1406 num = pls->first_seq_no;
1407 }
1408 return num;
1409 }
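
/*
 * Worked example (hypothetical values) for the plain fragment_duration
 * branch of a live stream: with now - availability_start_time = 100 s,
 * suggested_presentation_delay = 10 s, fragment_timescale = 90000 and
 * fragment_duration = 450000 (5 s segments), the current sequence number is
 * first_seq_no + ((100 - 10) * 90000) / 450000 = first_seq_no + 18.
 */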
1410
1411 static int64_t calc_min_seg_no(AVFormatContext *s, struct representation *pls)
1412 {
1413 DASHContext *c = s->priv_data;
1414 int64_t num = 0;
1415
1416 if (c->is_live && pls->fragment_duration) {
1417 av_log(s, AV_LOG_TRACE, "in live mode\n");
1418 num = pls->first_seq_no + (((get_current_time_in_sec() - c->availability_start_time) - c->time_shift_buffer_depth) * pls->fragment_timescale) / pls->fragment_duration;
1419 } else {
1420 num = pls->first_seq_no;
1421 }
1422 return num;
1423 }
1424
1425 static int64_t calc_max_seg_no(struct representation *pls, DASHContext *c)
1426 {
1427 int64_t num = 0;
1428
1429 if (pls->n_fragments) {
1430 num = pls->first_seq_no + pls->n_fragments - 1;
1431 } else if (pls->n_timelines) {
1432 int i = 0;
1433 num = pls->first_seq_no + pls->n_timelines - 1;
1434 for (i = 0; i < pls->n_timelines; i++) {
1435 if (pls->timelines[i]->repeat == -1) {
1436 int length_of_each_segment = pls->timelines[i]->duration / pls->fragment_timescale;
1437 num = c->period_duration / length_of_each_segment;
1438 } else {
1439 num += pls->timelines[i]->repeat;
1440 }
1441 }
1442 } else if (c->is_live && pls->fragment_duration) {
1443 num = pls->first_seq_no + (((get_current_time_in_sec() - c->availability_start_time)) * pls->fragment_timescale) / pls->fragment_duration;
1444 } else if (pls->fragment_duration) {
1445 num = pls->first_seq_no + av_rescale_rnd(1, c->media_presentation_duration * pls->fragment_timescale, pls->fragment_duration, AV_ROUND_UP);
1446 }
1447
1448 return num;
1449 }
1450
1451 static void move_timelines(struct representation *rep_src, struct representation *rep_dest, DASHContext *c)
1452 {
1453 if (rep_dest && rep_src ) {
1454 free_timelines_list(rep_dest);
1455 rep_dest->timelines = rep_src->timelines;
1456 rep_dest->n_timelines = rep_src->n_timelines;
1457 rep_dest->first_seq_no = rep_src->first_seq_no;
1458 rep_dest->last_seq_no = calc_max_seg_no(rep_dest, c);
1459 rep_src->timelines = NULL;
1460 rep_src->n_timelines = 0;
1461 rep_dest->cur_seq_no = rep_src->cur_seq_no;
1462 }
1463 }
1464
1465 static void move_segments(struct representation *rep_src, struct representation *rep_dest, DASHContext *c)
1466 {
1467 if (rep_dest && rep_src ) {
1468 free_fragment_list(rep_dest);
1469 if (rep_src->start_number > (rep_dest->start_number + rep_dest->n_fragments))
1470 rep_dest->cur_seq_no = 0;
1471 else
1472 rep_dest->cur_seq_no += rep_src->start_number - rep_dest->start_number;
1473 rep_dest->fragments = rep_src->fragments;
1474 rep_dest->n_fragments = rep_src->n_fragments;
1475 rep_dest->parent = rep_src->parent;
1476 rep_dest->last_seq_no = calc_max_seg_no(rep_dest, c);
1477 rep_src->fragments = NULL;
1478 rep_src->n_fragments = 0;
1479 }
1480 }
1481
1482
1483 static int refresh_manifest(AVFormatContext *s)
1484 {
1485 int ret = 0, i;
1486 DASHContext *c = s->priv_data;
1487 // save current context
1488 int n_videos = c->n_videos;
1489 struct representation **videos = c->videos;
1490 int n_audios = c->n_audios;
1491 struct representation **audios = c->audios;
1492 int n_subtitles = c->n_subtitles;
1493 struct representation **subtitles = c->subtitles;
1494 char *base_url = c->base_url;
1495
1496 c->base_url = NULL;
1497 c->n_videos = 0;
1498 c->videos = NULL;
1499 c->n_audios = 0;
1500 c->audios = NULL;
1501 c->n_subtitles = 0;
1502 c->subtitles = NULL;
1503 ret = parse_manifest(s, s->url, NULL);
1504 if (ret)
1505 goto finish;
1506
1507 if (c->n_videos != n_videos) {
1508 av_log(c, AV_LOG_ERROR,
1509 "new manifest has mismatched no. of video representations, %d -> %d\n",
1510 n_videos, c->n_videos);
1511 return AVERROR_INVALIDDATA;
1512 }
1513 if (c->n_audios != n_audios) {
1514 av_log(c, AV_LOG_ERROR,
1515 "new manifest has mismatched no. of audio representations, %d -> %d\n",
1516 n_audios, c->n_audios);
1517 return AVERROR_INVALIDDATA;
1518 }
1519 if (c->n_subtitles != n_subtitles) {
1520 av_log(c, AV_LOG_ERROR,
1521 "new manifest has mismatched no. of subtitles representations, %d -> %d\n",
1522 n_subtitles, c->n_subtitles);
1523 return AVERROR_INVALIDDATA;
1524 }
1525
1526 for (i = 0; i < n_videos; i++) {
1527 struct representation *cur_video = videos[i];
1528 struct representation *ccur_video = c->videos[i];
1529 if (cur_video->timelines) {
1530 // calc current time
1531 int64_t currentTime = get_segment_start_time_based_on_timeline(cur_video, cur_video->cur_seq_no) / cur_video->fragment_timescale;
1532 // update segments
1533 ccur_video->cur_seq_no = calc_next_seg_no_from_timelines(ccur_video, currentTime * cur_video->fragment_timescale - 1);
1534 if (ccur_video->cur_seq_no >= 0) {
1535 move_timelines(ccur_video, cur_video, c);
1536 }
1537 }
1538 if (cur_video->fragments) {
1539 move_segments(ccur_video, cur_video, c);
1540 }
1541 }
1542 for (i = 0; i < n_audios; i++) {
1543 struct representation *cur_audio = audios[i];
1544 struct representation *ccur_audio = c->audios[i];
1545 if (cur_audio->timelines) {
1546 // calc current time
1547 int64_t currentTime = get_segment_start_time_based_on_timeline(cur_audio, cur_audio->cur_seq_no) / cur_audio->fragment_timescale;
1548 // update segments
1549 ccur_audio->cur_seq_no = calc_next_seg_no_from_timelines(ccur_audio, currentTime * cur_audio->fragment_timescale - 1);
1550 if (ccur_audio->cur_seq_no >= 0) {
1551 move_timelines(ccur_audio, cur_audio, c);
1552 }
1553 }
1554 if (cur_audio->fragments) {
1555 move_segments(ccur_audio, cur_audio, c);
1556 }
1557 }
1558
1559 finish:
1560 // restore context
1561 if (c->base_url)
1562 av_free(base_url);
1563 else
1564 c->base_url = base_url;
1565
1566 if (c->subtitles)
1567 free_subtitle_list(c);
1568 if (c->audios)
1569 free_audio_list(c);
1570 if (c->videos)
1571 free_video_list(c);
1572
1573 c->n_subtitles = n_subtitles;
1574 c->subtitles = subtitles;
1575 c->n_audios = n_audios;
1576 c->audios = audios;
1577 c->n_videos = n_videos;
1578 c->videos = videos;
1579 return ret;
1580 }
1581
1582 static struct fragment *get_current_fragment(struct representation *pls)
1583 {
1584 int64_t min_seq_no = 0;
1585 int64_t max_seq_no = 0;
1586 struct fragment *seg = NULL;
1587 struct fragment *seg_ptr = NULL;
1588 DASHContext *c = pls->parent->priv_data;
1589
1590 while (( !ff_check_interrupt(c->interrupt_callback)&& pls->n_fragments > 0)) {
1591 if (pls->cur_seq_no < pls->n_fragments) {
1592 seg_ptr = pls->fragments[pls->cur_seq_no];
1593 seg = av_mallocz(sizeof(struct fragment));
1594 if (!seg) {
1595 return NULL;
1596 }
1597 seg->url = av_strdup(seg_ptr->url);
1598 if (!seg->url) {
1599 av_free(seg);
1600 return NULL;
1601 }
1602 seg->size = seg_ptr->size;
1603 seg->url_offset = seg_ptr->url_offset;
1604 return seg;
1605 } else if (c->is_live) {
1606 refresh_manifest(pls->parent);
1607 } else {
1608 break;
1609 }
1610 }
1611 if (c->is_live) {
1612 min_seq_no = calc_min_seg_no(pls->parent, pls);
1613 max_seq_no = calc_max_seg_no(pls, c);
1614
1615 if (pls->timelines || pls->fragments) {
1616 refresh_manifest(pls->parent);
1617 }
1618 if (pls->cur_seq_no <= min_seq_no) {
1619 av_log(pls->parent, AV_LOG_VERBOSE, "old fragment: cur[%"PRId64"] min[%"PRId64"] max[%"PRId64"]\n", (int64_t)pls->cur_seq_no, min_seq_no, max_seq_no);
1620 pls->cur_seq_no = calc_cur_seg_no(pls->parent, pls);
1621 } else if (pls->cur_seq_no > max_seq_no) {
1622 av_log(pls->parent, AV_LOG_VERBOSE, "new fragment: min[%"PRId64"] max[%"PRId64"]\n", min_seq_no, max_seq_no);
1623 }
1624 seg = av_mallocz(sizeof(struct fragment));
1625 if (!seg) {
1626 return NULL;
1627 }
1628 } else if (pls->cur_seq_no <= pls->last_seq_no) {
1629 seg = av_mallocz(sizeof(struct fragment));
1630 if (!seg) {
1631 return NULL;
1632 }
1633 }
1634 if (seg) {
1635 char *tmpfilename;
1636 if (!pls->url_template) {
1637 av_log(pls->parent, AV_LOG_ERROR, "Cannot get fragment, missing template URL\n");
1638 av_free(seg);
1639 return NULL;
1640 }
1641 tmpfilename = av_mallocz(c->max_url_size);
1642 if (!tmpfilename) {
1643 av_free(seg);
1644 return NULL;
1645 }
1646 ff_dash_fill_tmpl_params(tmpfilename, c->max_url_size, pls->url_template, 0, pls->cur_seq_no, 0, get_segment_start_time_based_on_timeline(pls, pls->cur_seq_no));
1647 seg->url = av_strireplace(pls->url_template, pls->url_template, tmpfilename);
1648 if (!seg->url) {
1649 av_log(pls->parent, AV_LOG_WARNING, "Unable to resolve template url '%s', try to use origin template\n", pls->url_template);
1650 seg->url = av_strdup(pls->url_template);
1651 if (!seg->url) {
1652 av_log(pls->parent, AV_LOG_ERROR, "Cannot resolve template url '%s'\n", pls->url_template);
1653 av_free(tmpfilename);
1654 av_free(seg);
1655 return NULL;
1656 }
1657 }
1658 av_free(tmpfilename);
1659 seg->size = -1;
1660 }
1661
1662 return seg;
1663 }
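
/*
 * Example (hypothetical template): for a live/template representation with
 * url_template = "chunk-video1-$Number$.m4s" and cur_seq_no = 42,
 * ff_dash_fill_tmpl_params() expands the template to "chunk-video1-42.m4s",
 * which becomes seg->url; a $Time$ placeholder would instead be filled from
 * get_segment_start_time_based_on_timeline().
 */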
1664
1665 static int read_from_url(struct representation *pls, struct fragment *seg,
1666 uint8_t *buf, int buf_size)
1667 {
1668 int ret;
1669
1670 /* limit read if the fragment was only a part of a file */
1671 if (seg->size >= 0)
1672 buf_size = FFMIN(buf_size, pls->cur_seg_size - pls->cur_seg_offset);
1673
1674 ret = avio_read(pls->input, buf, buf_size);
1675 if (ret > 0)
1676 pls->cur_seg_offset += ret;
1677
1678 return ret;
1679 }
1680
1681 static int open_input(DASHContext *c, struct representation *pls, struct fragment *seg)
1682 {
1683 AVDictionary *opts = NULL;
1684 char *url = NULL;
1685 int ret = 0;
1686
1687 url = av_mallocz(c->max_url_size);
1688 if (!url) {
1689 ret = AVERROR(ENOMEM);
1690 goto cleanup;
1691 }
1692
1693 if (seg->size >= 0) {
1694 /* try to restrict the HTTP request to the part we want
1695          * (if this is in fact an HTTP request) */
1696 av_dict_set_int(&opts, "offset", seg->url_offset, 0);
1697 av_dict_set_int(&opts, "end_offset", seg->url_offset + seg->size, 0);
1698 }
1699
1700 ff_make_absolute_url(url, c->max_url_size, c->base_url, seg->url);
1701 av_log(pls->parent, AV_LOG_VERBOSE, "DASH request for url '%s', offset %"PRId64"\n",
1702 url, seg->url_offset);
1703 ret = open_url(pls->parent, &pls->input, url, &c->avio_opts, opts, NULL);
1704
1705 cleanup:
1706 av_free(url);
1707 av_dict_free(&opts);
1708 pls->cur_seg_offset = 0;
1709 pls->cur_seg_size = seg->size;
1710 return ret;
1711 }
1712
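/* Download the Media Initialization Section into pls->init_sec_buf, capped at
 * 1 MiB. Does nothing if there is no init section or it is already buffered. */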
1713 static int update_init_section(struct representation *pls)
1714 {
1715 static const int max_init_section_size = 1024 * 1024;
1716 DASHContext *c = pls->parent->priv_data;
1717 int64_t sec_size;
1718 int64_t urlsize;
1719 int ret;
1720
1721 if (!pls->init_section || pls->init_sec_buf)
1722 return 0;
1723
1724 ret = open_input(c, pls, pls->init_section);
1725 if (ret < 0) {
1726 av_log(pls->parent, AV_LOG_WARNING,
1727 "Failed to open an initialization section\n");
1728 return ret;
1729 }
1730
1731 if (pls->init_section->size >= 0)
1732 sec_size = pls->init_section->size;
1733 else if ((urlsize = avio_size(pls->input)) >= 0)
1734 sec_size = urlsize;
1735 else
1736 sec_size = max_init_section_size;
1737
1738 av_log(pls->parent, AV_LOG_DEBUG,
1739 "Downloading an initialization section of size %"PRId64"\n",
1740 sec_size);
1741
1742 sec_size = FFMIN(sec_size, max_init_section_size);
1743
1744 av_fast_malloc(&pls->init_sec_buf, &pls->init_sec_buf_size, sec_size);
1745
1746 ret = read_from_url(pls, pls->init_section, pls->init_sec_buf,
1747 pls->init_sec_buf_size);
1748 ff_format_io_close(pls->parent, &pls->input);
1749
1750 if (ret < 0)
1751 return ret;
1752
1753 pls->init_sec_data_len = ret;
1754 pls->init_sec_buf_read_offset = 0;
1755
1756 return 0;
1757 }
1758
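/* AVIO seek callback for the nested demuxer: seeking is passed through only
 * for representations with an explicit fragment list and no buffered init
 * section; everything else is unseekable. */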
1759 static int64_t seek_data(void *opaque, int64_t offset, int whence)
1760 {
1761 struct representation *v = opaque;
1762 if (v->n_fragments && !v->init_sec_data_len) {
1763 return avio_seek(v->input, offset, whence);
1764 }
1765
1766 return AVERROR(ENOSYS);
1767 }
1768
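/* AVIO read callback feeding the nested demuxer: opens the next fragment when
 * needed, serves any buffered init section first, then the fragment payload.
 * When a fragment is exhausted the sequence number is advanced and the
 * representation is flagged for a restart. */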
1769 static int read_data(void *opaque, uint8_t *buf, int buf_size)
1770 {
1771 int ret = 0;
1772 struct representation *v = opaque;
1773 DASHContext *c = v->parent->priv_data;
1774
1775 restart:
1776 if (!v->input) {
1777 free_fragment(&v->cur_seg);
1778 v->cur_seg = get_current_fragment(v);
1779 if (!v->cur_seg) {
1780 ret = AVERROR_EOF;
1781 goto end;
1782 }
1783
1784 /* load/update Media Initialization Section, if any */
1785 ret = update_init_section(v);
1786 if (ret)
1787 goto end;
1788
1789 ret = open_input(c, v, v->cur_seg);
1790 if (ret < 0) {
1791 if (ff_check_interrupt(c->interrupt_callback)) {
1792 ret = AVERROR_EXIT;
1793 goto end;
1794 }
1795 av_log(v->parent, AV_LOG_WARNING, "Failed to open fragment of playlist\n");
1796 v->cur_seq_no++;
1797 goto restart;
1798 }
1799 }
1800
1801 if (v->init_sec_buf_read_offset < v->init_sec_data_len) {
1802         /* Push the init section out before the first actual fragment */
1803 int copy_size = FFMIN(v->init_sec_data_len - v->init_sec_buf_read_offset, buf_size);
1804 memcpy(buf, v->init_sec_buf, copy_size);
1805 v->init_sec_buf_read_offset += copy_size;
1806 ret = copy_size;
1807 goto end;
1808 }
1809
1810     /* if v->cur_seg is NULL, fetch the current fragment and check again that we got one */
1811 if (!v->cur_seg) {
1812 v->cur_seg = get_current_fragment(v);
1813 }
1814 if (!v->cur_seg) {
1815 ret = AVERROR_EOF;
1816 goto end;
1817 }
1818 ret = read_from_url(v, v->cur_seg, buf, buf_size);
1819 if (ret > 0)
1820 goto end;
1821
1822 if (c->is_live || v->cur_seq_no < v->last_seq_no) {
1823 if (!v->is_restart_needed)
1824 v->cur_seq_no++;
1825 v->is_restart_needed = 1;
1826 }
1827
1828 end:
1829 return ret;
1830 }
1831
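/* Nested demuxers must never open external resources on their own; refuse it. */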
1832 static int nested_io_open(AVFormatContext *s, AVIOContext **pb, const char *url,
1833 int flags, AVDictionary **opts)
1834 {
1835 av_log(s, AV_LOG_ERROR,
1836 "A DASH playlist item '%s' referred to an external file '%s'. "
1837 "Opening this file was forbidden for security reasons\n",
1838 s->url, url);
1839 return AVERROR(EPERM);
1840 }
1841
1842 static void close_demux_for_component(struct representation *pls)
1843 {
1844 /* note: the internal buffer could have changed */
1845 av_freep(&pls->pb.pub.buffer);
1846 memset(&pls->pb, 0x00, sizeof(pls->pb));
1847 pls->ctx->pb = NULL;
1848 avformat_close_input(&pls->ctx);
1849 }
1850
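/* (Re)create the nested demuxer of a representation: set up a custom AVIO
 * context backed by read_data()/seek_data(), probe and open the input format,
 * and pass the CENC decryption key from the DASH options when present. */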
1851 static int reopen_demux_for_component(AVFormatContext *s, struct representation *pls)
1852 {
1853 DASHContext *c = s->priv_data;
1854 const AVInputFormat *in_fmt = NULL;
1855 AVDictionary *in_fmt_opts = NULL;
1856 uint8_t *avio_ctx_buffer = NULL;
1857 int ret = 0, i;
1858
1859 if (pls->ctx) {
1860 close_demux_for_component(pls);
1861 }
1862
1863 if (ff_check_interrupt(&s->interrupt_callback)) {
1864 ret = AVERROR_EXIT;
1865 goto fail;
1866 }
1867
1868 if (!(pls->ctx = avformat_alloc_context())) {
1869 ret = AVERROR(ENOMEM);
1870 goto fail;
1871 }
1872
1873 avio_ctx_buffer = av_malloc(INITIAL_BUFFER_SIZE);
1874     if (!avio_ctx_buffer) {
1875 ret = AVERROR(ENOMEM);
1876 avformat_free_context(pls->ctx);
1877 pls->ctx = NULL;
1878 goto fail;
1879 }
1880 ffio_init_context(&pls->pb, avio_ctx_buffer, INITIAL_BUFFER_SIZE, 0,
1881 pls, read_data, NULL, c->is_live ? NULL : seek_data);
1882 pls->pb.pub.seekable = 0;
1883
1884 if ((ret = ff_copy_whiteblacklists(pls->ctx, s)) < 0)
1885 goto fail;
1886
1887 pls->ctx->flags = AVFMT_FLAG_CUSTOM_IO;
1888 pls->ctx->probesize = s->probesize > 0 ? s->probesize : 1024 * 4;
1889 pls->ctx->max_analyze_duration = s->max_analyze_duration > 0 ? s->max_analyze_duration : 4 * AV_TIME_BASE;
1890 pls->ctx->interrupt_callback = s->interrupt_callback;
1891 ret = av_probe_input_buffer(&pls->pb.pub, &in_fmt, "", NULL, 0, 0);
1892 if (ret < 0) {
1893 av_log(s, AV_LOG_ERROR, "Error when loading first fragment of playlist\n");
1894 avformat_free_context(pls->ctx);
1895 pls->ctx = NULL;
1896 goto fail;
1897 }
1898
1899 pls->ctx->pb = &pls->pb.pub;
1900 pls->ctx->io_open = nested_io_open;
1901
1902 if (c->cenc_decryption_key)
1903 av_dict_set(&in_fmt_opts, "decryption_key", c->cenc_decryption_key, 0);
1904
1905 // provide additional information from mpd if available
1906 ret = avformat_open_input(&pls->ctx, "", in_fmt, &in_fmt_opts); //pls->init_section->url
1907 av_dict_free(&in_fmt_opts);
1908 if (ret < 0)
1909 goto fail;
1910 if (pls->n_fragments) {
1911 #if FF_API_R_FRAME_RATE
1912 if (pls->framerate.den) {
1913 for (i = 0; i < pls->ctx->nb_streams; i++)
1914 pls->ctx->streams[i]->r_frame_rate = pls->framerate;
1915 }
1916 #endif
1917 ret = avformat_find_stream_info(pls->ctx, NULL);
1918 if (ret < 0)
1919 goto fail;
1920 }
1921
1922 fail:
1923 return ret;
1924 }
1925
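/* First-time setup of a representation: compute its starting segment number,
 * open the nested demuxer and mirror each inner stream (codec parameters,
 * timing, disposition, side data) as a stream of the outer context. */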
1926 static int open_demux_for_component(AVFormatContext *s, struct representation *pls)
1927 {
1928 int ret = 0;
1929 int i;
1930
1931 pls->parent = s;
1932 pls->cur_seq_no = calc_cur_seg_no(s, pls);
1933
1934 if (!pls->last_seq_no) {
1935 pls->last_seq_no = calc_max_seg_no(pls, s->priv_data);
1936 }
1937
1938 ret = reopen_demux_for_component(s, pls);
1939 if (ret < 0) {
1940 goto fail;
1941 }
1942 for (i = 0; i < pls->ctx->nb_streams; i++) {
1943 AVStream *st = avformat_new_stream(s, NULL);
1944 AVStream *ist = pls->ctx->streams[i];
1945 if (!st) {
1946 ret = AVERROR(ENOMEM);
1947 goto fail;
1948 }
1949 st->id = i;
1950 avcodec_parameters_copy(st->codecpar, ist->codecpar);
1951 avpriv_set_pts_info(st, ist->pts_wrap_bits, ist->time_base.num, ist->time_base.den);
1952
1953 // copy disposition
1954 st->disposition = ist->disposition;
1955
1956 // copy side data
1957 for (int i = 0; i < ist->nb_side_data; i++) {
1958 const AVPacketSideData *sd_src = &ist->side_data[i];
1959 uint8_t *dst_data;
1960
1961 dst_data = av_stream_new_side_data(st, sd_src->type, sd_src->size);
1962 if (!dst_data)
1963 return AVERROR(ENOMEM);
1964 memcpy(dst_data, sd_src->data, sd_src->size);
1965 }
1966 }
1967
1968 return 0;
1969 fail:
1970 return ret;
1971 }
1972
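/* Return 1 if every representation's init section matches the first one
 * (same URL, offset and size), 0 otherwise. */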
1973 static int is_common_init_section_exist(struct representation **pls, int n_pls)
1974 {
1975 struct fragment *first_init_section = pls[0]->init_section;
1976     char *url = NULL;
1977 int64_t url_offset = -1;
1978 int64_t size = -1;
1979 int i = 0;
1980
1981 if (first_init_section == NULL || n_pls == 0)
1982 return 0;
1983
1984 url = first_init_section->url;
1985 url_offset = first_init_section->url_offset;
1986 size = pls[0]->init_section->size;
1987     for (i = 0; i < n_pls; i++) {
1988 if (!pls[i]->init_section)
1989 continue;
1990
1991 if (av_strcasecmp(pls[i]->init_section->url, url) ||
1992 pls[i]->init_section->url_offset != url_offset ||
1993 pls[i]->init_section->size != size) {
1994 return 0;
1995 }
1996 }
1997 return 1;
1998 }
1999
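/* Reuse an already downloaded init section: copy the buffered init data and
 * the current timestamp from rep_src into rep_dest. */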
2000 static int copy_init_section(struct representation *rep_dest, struct representation *rep_src)
2001 {
2002 rep_dest->init_sec_buf = av_mallocz(rep_src->init_sec_buf_size);
2003 if (!rep_dest->init_sec_buf) {
2004 av_log(rep_dest->ctx, AV_LOG_WARNING, "Cannot alloc memory for init_sec_buf\n");
2005 return AVERROR(ENOMEM);
2006 }
2007 memcpy(rep_dest->init_sec_buf, rep_src->init_sec_buf, rep_src->init_sec_data_len);
2008 rep_dest->init_sec_buf_size = rep_src->init_sec_buf_size;
2009 rep_dest->init_sec_data_len = rep_src->init_sec_data_len;
2010 rep_dest->cur_timestamp = rep_src->cur_timestamp;
2011
2012 return 0;
2013 }
2014
2015 static void move_metadata(AVStream *st, const char *key, char **value)
2016 {
2017 if (*value) {
2018 av_dict_set(&st->metadata, key, *value, AV_DICT_DONT_STRDUP_VAL);
2019 *value = NULL;
2020 }
2021 }
2022
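/* read_header callback: parse the MPD, open a nested demuxer for every
 * selected video/audio/subtitle representation and group the resulting
 * streams into one program, exporting id/language/variant_bitrate metadata. */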
2023 static int dash_read_header(AVFormatContext *s)
2024 {
2025 DASHContext *c = s->priv_data;
2026 struct representation *rep;
2027 AVProgram *program;
2028 int ret = 0;
2029 int stream_index = 0;
2030 int i;
2031
2032 c->interrupt_callback = &s->interrupt_callback;
2033
2034 if ((ret = ffio_copy_url_options(s->pb, &c->avio_opts)) < 0)
2035 return ret;
2036
2037 if ((ret = parse_manifest(s, s->url, s->pb)) < 0)
2038 return ret;
2039
2040 /* If this isn't a live stream, fill the total duration of the
2041 * stream. */
2042 if (!c->is_live) {
2043 s->duration = (int64_t) c->media_presentation_duration * AV_TIME_BASE;
2044 } else {
2045 av_dict_set(&c->avio_opts, "seekable", "0", 0);
2046 }
2047
2048     if (c->n_videos)
2049 c->is_init_section_common_video = is_common_init_section_exist(c->videos, c->n_videos);
2050
2051 /* Open the demuxer for video and audio components if available */
2052 for (i = 0; i < c->n_videos; i++) {
2053 rep = c->videos[i];
2054 if (i > 0 && c->is_init_section_common_video) {
2055 ret = copy_init_section(rep, c->videos[0]);
2056 if (ret < 0)
2057 return ret;
2058 }
2059 ret = open_demux_for_component(s, rep);
2060
2061 if (ret)
2062 return ret;
2063 rep->stream_index = stream_index;
2064 ++stream_index;
2065 }
2066
2067     if (c->n_audios)
2068 c->is_init_section_common_audio = is_common_init_section_exist(c->audios, c->n_audios);
2069
2070 for (i = 0; i < c->n_audios; i++) {
2071 rep = c->audios[i];
2072 if (i > 0 && c->is_init_section_common_audio) {
2073 ret = copy_init_section(rep, c->audios[0]);
2074 if (ret < 0)
2075 return ret;
2076 }
2077 ret = open_demux_for_component(s, rep);
2078
2079 if (ret)
2080 return ret;
2081 rep->stream_index = stream_index;
2082 ++stream_index;
2083 }
2084
2085 if (c->n_subtitles)
2086 c->is_init_section_common_subtitle = is_common_init_section_exist(c->subtitles, c->n_subtitles);
2087
2088 for (i = 0; i < c->n_subtitles; i++) {
2089 rep = c->subtitles[i];
2090 if (i > 0 && c->is_init_section_common_subtitle) {
2091 ret = copy_init_section(rep, c->subtitles[0]);
2092 if (ret < 0)
2093 return ret;
2094 }
2095 ret = open_demux_for_component(s, rep);
2096
2097 if (ret)
2098 return ret;
2099 rep->stream_index = stream_index;
2100 ++stream_index;
2101 }
2102
2103 if (!stream_index)
2104 return AVERROR_INVALIDDATA;
2105
2106 /* Create a program */
2107 program = av_new_program(s, 0);
2108 if (!program)
2109 return AVERROR(ENOMEM);
2110
2111 for (i = 0; i < c->n_videos; i++) {
2112 rep = c->videos[i];
2113 av_program_add_stream_index(s, 0, rep->stream_index);
2114 rep->assoc_stream = s->streams[rep->stream_index];
2115 if (rep->bandwidth > 0)
2116 av_dict_set_int(&rep->assoc_stream->metadata, "variant_bitrate", rep->bandwidth, 0);
2117 move_metadata(rep->assoc_stream, "id", &rep->id);
2118 }
2119 for (i = 0; i < c->n_audios; i++) {
2120 rep = c->audios[i];
2121 av_program_add_stream_index(s, 0, rep->stream_index);
2122 rep->assoc_stream = s->streams[rep->stream_index];
2123 if (rep->bandwidth > 0)
2124 av_dict_set_int(&rep->assoc_stream->metadata, "variant_bitrate", rep->bandwidth, 0);
2125 move_metadata(rep->assoc_stream, "id", &rep->id);
2126 move_metadata(rep->assoc_stream, "language", &rep->lang);
2127 }
2128 for (i = 0; i < c->n_subtitles; i++) {
2129 rep = c->subtitles[i];
2130 av_program_add_stream_index(s, 0, rep->stream_index);
2131 rep->assoc_stream = s->streams[rep->stream_index];
2132 move_metadata(rep->assoc_stream, "id", &rep->id);
2133 move_metadata(rep->assoc_stream, "language", &rep->lang);
2134 }
2135
2136 return 0;
2137 }
2138
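/* Open or close nested demuxers as streams get enabled or discarded by the
 * caller, catching a newly enabled representation up to the highest current
 * segment number. */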
2139 static void recheck_discard_flags(AVFormatContext *s, struct representation **p, int n)
2140 {
2141 int i, j;
2142
2143 for (i = 0; i < n; i++) {
2144 struct representation *pls = p[i];
2145 int needed = !pls->assoc_stream || pls->assoc_stream->discard < AVDISCARD_ALL;
2146
2147 if (needed && !pls->ctx) {
2148 pls->cur_seg_offset = 0;
2149 pls->init_sec_buf_read_offset = 0;
2150 /* Catch up */
2151 for (j = 0; j < n; j++) {
2152 pls->cur_seq_no = FFMAX(pls->cur_seq_no, p[j]->cur_seq_no);
2153 }
2154 reopen_demux_for_component(s, pls);
2155 av_log(s, AV_LOG_INFO, "Now receiving stream_index %d\n", pls->stream_index);
2156 } else if (!needed && pls->ctx) {
2157 close_demux_for_component(pls);
2158 ff_format_io_close(pls->parent, &pls->input);
2159 av_log(s, AV_LOG_INFO, "No longer receiving stream_index %d\n", pls->stream_index);
2160 }
2161 }
2162 }
2163
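/* read_packet callback: pick the active representation with the lowest current
 * timestamp, read one packet from it, remap it to the matching output stream
 * and update the representation's 90 kHz reference timestamp. */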
2164 static int dash_read_packet(AVFormatContext *s, AVPacket *pkt)
2165 {
2166 DASHContext *c = s->priv_data;
2167 int ret = 0, i;
2168 int64_t mints = 0;
2169 struct representation *cur = NULL;
2170 struct representation *rep = NULL;
2171
2172 recheck_discard_flags(s, c->videos, c->n_videos);
2173 recheck_discard_flags(s, c->audios, c->n_audios);
2174 recheck_discard_flags(s, c->subtitles, c->n_subtitles);
2175
2176 for (i = 0; i < c->n_videos; i++) {
2177 rep = c->videos[i];
2178 if (!rep->ctx)
2179 continue;
2180 if (!cur || rep->cur_timestamp < mints) {
2181 cur = rep;
2182 mints = rep->cur_timestamp;
2183 }
2184 }
2185 for (i = 0; i < c->n_audios; i++) {
2186 rep = c->audios[i];
2187 if (!rep->ctx)
2188 continue;
2189 if (!cur || rep->cur_timestamp < mints) {
2190 cur = rep;
2191 mints = rep->cur_timestamp;
2192 }
2193 }
2194
2195 for (i = 0; i < c->n_subtitles; i++) {
2196 rep = c->subtitles[i];
2197 if (!rep->ctx)
2198 continue;
2199 if (!cur || rep->cur_timestamp < mints) {
2200 cur = rep;
2201 mints = rep->cur_timestamp;
2202 }
2203 }
2204
2205 if (!cur) {
2206 return AVERROR_INVALIDDATA;
2207 }
2208 while (!ff_check_interrupt(c->interrupt_callback) && !ret) {
2209 ret = av_read_frame(cur->ctx, pkt);
2210 if (ret >= 0) {
2211 /* If we got a packet, return it */
2212 cur->cur_timestamp = av_rescale(pkt->pts, (int64_t)cur->ctx->streams[0]->time_base.num * 90000, cur->ctx->streams[0]->time_base.den);
2213 pkt->stream_index = cur->stream_index;
2214 return 0;
2215 }
2216 if (cur->is_restart_needed) {
2217 cur->cur_seg_offset = 0;
2218 cur->init_sec_buf_read_offset = 0;
2219 ff_format_io_close(cur->parent, &cur->input);
2220 ret = reopen_demux_for_component(s, cur);
2221 cur->is_restart_needed = 0;
2222 }
2223 }
2224 return AVERROR_EOF;
2225 }
2226
2227 static int dash_close(AVFormatContext *s)
2228 {
2229 DASHContext *c = s->priv_data;
2230 free_audio_list(c);
2231 free_video_list(c);
2232 free_subtitle_list(c);
2233 av_dict_free(&c->avio_opts);
2234 av_freep(&c->base_url);
2235 return 0;
2236 }
2237
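/* Seek one representation to seek_pos_msec: either seek directly within a
 * single-fragment stream, or map the position to a segment number via the
 * SegmentTimeline or the constant fragment duration. With dry_run set only
 * the bookkeeping is updated and the nested demuxer is not reopened. */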
2238 static int dash_seek(AVFormatContext *s, struct representation *pls, int64_t seek_pos_msec, int flags, int dry_run)
2239 {
2240 int ret = 0;
2241 int i = 0;
2242 int j = 0;
2243 int64_t duration = 0;
2244
2245 av_log(pls->parent, AV_LOG_VERBOSE, "DASH seek pos[%"PRId64"ms] %s\n",
2246 seek_pos_msec, dry_run ? " (dry)" : "");
2247
2248 // single fragment mode
2249 if (pls->n_fragments == 1) {
2250 pls->cur_timestamp = 0;
2251 pls->cur_seg_offset = 0;
2252 if (dry_run)
2253 return 0;
2254 ff_read_frame_flush(pls->ctx);
2255 return av_seek_frame(pls->ctx, -1, seek_pos_msec * 1000, flags);
2256 }
2257
2258 ff_format_io_close(pls->parent, &pls->input);
2259
2260 // find the nearest fragment
2261 if (pls->n_timelines > 0 && pls->fragment_timescale > 0) {
2262 int64_t num = pls->first_seq_no;
2263 av_log(pls->parent, AV_LOG_VERBOSE, "dash_seek with SegmentTimeline start n_timelines[%d] "
2264 "last_seq_no[%"PRId64"].\n",
2265 (int)pls->n_timelines, (int64_t)pls->last_seq_no);
2266 for (i = 0; i < pls->n_timelines; i++) {
2267 if (pls->timelines[i]->starttime > 0) {
2268 duration = pls->timelines[i]->starttime;
2269 }
2270 duration += pls->timelines[i]->duration;
2271 if (seek_pos_msec < ((duration * 1000) / pls->fragment_timescale)) {
2272 goto set_seq_num;
2273 }
2274 for (j = 0; j < pls->timelines[i]->repeat; j++) {
2275 duration += pls->timelines[i]->duration;
2276 num++;
2277 if (seek_pos_msec < ((duration * 1000) / pls->fragment_timescale)) {
2278 goto set_seq_num;
2279 }
2280 }
2281 num++;
2282 }
2283
2284 set_seq_num:
2285 pls->cur_seq_no = num > pls->last_seq_no ? pls->last_seq_no : num;
2286 av_log(pls->parent, AV_LOG_VERBOSE, "dash_seek with SegmentTimeline end cur_seq_no[%"PRId64"].\n",
2287 (int64_t)pls->cur_seq_no);
2288 } else if (pls->fragment_duration > 0) {
2289 pls->cur_seq_no = pls->first_seq_no + ((seek_pos_msec * pls->fragment_timescale) / pls->fragment_duration) / 1000;
2290 } else {
2291 av_log(pls->parent, AV_LOG_ERROR, "dash_seek missing timeline or fragment_duration\n");
2292 pls->cur_seq_no = pls->first_seq_no;
2293 }
2294 pls->cur_timestamp = 0;
2295 pls->cur_seg_offset = 0;
2296 pls->init_sec_buf_read_offset = 0;
2297 ret = dry_run ? 0 : reopen_demux_for_component(s, pls);
2298
2299 return ret;
2300 }
2301
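/* read_seek callback: byte seeking and seeking in live streams are not
 * supported; every representation is seeked, discarded ones in dry-run mode. */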
2302 static int dash_read_seek(AVFormatContext *s, int stream_index, int64_t timestamp, int flags)
2303 {
2304 int ret = 0, i;
2305 DASHContext *c = s->priv_data;
2306 int64_t seek_pos_msec = av_rescale_rnd(timestamp, 1000,
2307 s->streams[stream_index]->time_base.den,
2308 flags & AVSEEK_FLAG_BACKWARD ?
2309 AV_ROUND_DOWN : AV_ROUND_UP);
2310 if ((flags & AVSEEK_FLAG_BYTE) || c->is_live)
2311 return AVERROR(ENOSYS);
2312
2313 /* Seek in discarded streams with dry_run=1 to avoid reopening them */
2314 for (i = 0; i < c->n_videos; i++) {
2315 if (!ret)
2316 ret = dash_seek(s, c->videos[i], seek_pos_msec, flags, !c->videos[i]->ctx);
2317 }
2318 for (i = 0; i < c->n_audios; i++) {
2319 if (!ret)
2320 ret = dash_seek(s, c->audios[i], seek_pos_msec, flags, !c->audios[i]->ctx);
2321 }
2322 for (i = 0; i < c->n_subtitles; i++) {
2323 if (!ret)
2324 ret = dash_seek(s, c->subtitles[i], seek_pos_msec, flags, !c->subtitles[i]->ctx);
2325 }
2326
2327 return ret;
2328 }
2329
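/* Probe: return the maximum score when the buffer contains an <MPD> element
 * that declares a DASH profile, 0 otherwise. */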
2330 static int dash_probe(const AVProbeData *p)
2331 {
2332 if (!av_stristr(p->buf, "<MPD"))
2333 return 0;
2334
2335 if (av_stristr(p->buf, "dash:profile:isoff-on-demand:2011") ||
2336 av_stristr(p->buf, "dash:profile:isoff-live:2011") ||
2337 av_stristr(p->buf, "dash:profile:isoff-live:2012") ||
2338 av_stristr(p->buf, "dash:profile:isoff-main:2011") ||
2339 av_stristr(p->buf, "3GPP:PSS:profile:DASH1")) {
2340 return AVPROBE_SCORE_MAX;
2341 }
2342 if (av_stristr(p->buf, "dash:profile")) {
2343 return AVPROBE_SCORE_MAX;
2344 }
2345
2346 return 0;
2347 }
2348
2349 #define OFFSET(x) offsetof(DASHContext, x)
2350 #define FLAGS AV_OPT_FLAG_DECODING_PARAM
2351 static const AVOption dash_options[] = {
2352 {"allowed_extensions", "List of file extensions that dash is allowed to access",
2353 OFFSET(allowed_extensions), AV_OPT_TYPE_STRING,
2354 {.str = "aac,m4a,m4s,m4v,mov,mp4,webm,ts"},
2355 INT_MIN, INT_MAX, FLAGS},
2356 { "cenc_decryption_key", "Media decryption key (hex)", OFFSET(cenc_decryption_key), AV_OPT_TYPE_STRING, {.str = NULL}, INT_MIN, INT_MAX, .flags = FLAGS },
2357 {NULL}
2358 };
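
/* Usage sketch (hypothetical manifest name and key value): these demuxer
 * private options are passed on the ffmpeg command line before the input, e.g.
 *   ffmpeg -cenc_decryption_key 00112233445566778899aabbccddeeff -i stream.mpd -c copy out.mkv
 */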
2359
2360 static const AVClass dash_class = {
2361 .class_name = "dash",
2362 .item_name = av_default_item_name,
2363 .option = dash_options,
2364 .version = LIBAVUTIL_VERSION_INT,
2365 };
2366
2367 const AVInputFormat ff_dash_demuxer = {
2368 .name = "dash",
2369 .long_name = NULL_IF_CONFIG_SMALL("Dynamic Adaptive Streaming over HTTP"),
2370 .priv_class = &dash_class,
2371 .priv_data_size = sizeof(DASHContext),
2372 .flags_internal = FF_FMT_INIT_CLEANUP,
2373 .read_probe = dash_probe,
2374 .read_header = dash_read_header,
2375 .read_packet = dash_read_packet,
2376 .read_close = dash_close,
2377 .read_seek = dash_read_seek,
2378 .flags = AVFMT_NO_BYTE_SEEK,
2379 };
2380