New HLS, DASH and MSS adaptive demuxer elements
author    Edward Hervey <edward@centricular.com>
          Fri, 11 Mar 2022 16:11:50 +0000 (17:11 +0100)
committer GStreamer Marge Bot <gitlab-merge-bot@gstreamer-foundation.org>
          Mon, 18 Apr 2022 14:11:23 +0000 (14:11 +0000)
This provides new HLS, DASH and MSS adaptive demuxer elements as a single plugin.

These elements offer many improvements over the legacy elements. They will only
work within a streams-aware context (`urisourcebin`, `uridecodebin3`,
`decodebin3`, `playbin3`, ...).

Stream selection and buffering are handled internally, which allows these
elements to directly manage the elementary streams and stream selection.
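
For example (hypothetical URI; any DASH, HLS or MSS manifest works the same way):

    gst-launch-1.0 playbin3 uri=https://example.com/stream/manifest.mpd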

Authors:
* Edward Hervey <edward@centricular.com>
* Jan Schmidt <jan@centricular.com>
* Piotrek Brzeziński <piotr@centricular.com>
* Tim-Philipp Müller <tim@centricular.com>

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/2117>

101 files changed:
subprojects/gst-docs/markdown/additional/design/adaptive-demuxer.md [new file with mode: 0644]
subprojects/gst-plugins-good/docs/gst_plugins_cache.json
subprojects/gst-plugins-good/docs/meson.build
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdash_debug.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-period.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-private.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-stream.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-track.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux-util.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselement.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselements.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/hls/meson.build [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/meson.build [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.h [new file with mode: 0644]
subprojects/gst-plugins-good/ext/adaptivedemux2/plugin.c [new file with mode: 0644]
subprojects/gst-plugins-good/ext/meson.build
subprojects/gst-plugins-good/ext/soup/gstsouploader.c
subprojects/gst-plugins-good/ext/soup/gstsouploader.h
subprojects/gst-plugins-good/meson_options.txt
subprojects/gst-plugins-good/tests/check/elements/dash_mpd.c [new file with mode: 0644]
subprojects/gst-plugins-good/tests/check/elements/hlsdemux_m3u8.c [new file with mode: 0644]
subprojects/gst-plugins-good/tests/check/meson.build

diff --git a/subprojects/gst-docs/markdown/additional/design/adaptive-demuxer.md b/subprojects/gst-docs/markdown/additional/design/adaptive-demuxer.md
new file mode 100644 (file)
index 0000000..c373cee
--- /dev/null
@@ -0,0 +1,319 @@
+# Adaptive Demuxers v2
+
+The existing adaptive demuxer support in `gst-plugins-bad` has several pitfalls
+that prevent it from being improved easily. The existing design uses a model
+where an adaptive streaming element (`dashdemux`, `hlsdemux`) downloads
+multiplexed fragments of media, but then relies on other components in the
+pipeline to provide download buffering, demuxing, elementary stream handling
+and decoding.
+
+
+The problems with the old design include:
+
+1. An assumption that fragment streams (to download) are equal to output
+   (elementary) streams.
+
+   * This made it hard to expose `GstStream` and `GstStreamCollection`
+     describing the available media streams, and by extension made it difficult
+     to provide efficient stream selection support.
+
+2. By performing download buffering outside the adaptive streaming element,
+   the download scheduling has no visibility into the presentation timeline.
+
+   * This made it impossible to implement more efficient variant selection and
+     download strategies.
+
+3. Several issues with establishing accurate timing/duration of fragments due to
+   not dealing with parsed data
+
+   * Especially with HLS, which does not provide detailed timing information
+     about the underlying media streams to the same extent that DASH does.
+
+4. An aging design that grew organically since the initial adaptive demuxers,
+   and which does not reflect today's better understanding of how they should
+   work.
+
+   * The code is complicated and interwoven in ways that are hard to follow
+     and reason about.
+
+5. Use of GStreamer pipeline sources for downloading.
+
+   * An internal download pipeline that contains a `httpsrc -> queue2 -> src`
+     chain makes download management, bandwidth estimation and stream parsing
+     more difficult, and uses a new thread for each download.
+
+
+# New design
+
+## High-level overview of the new AdaptiveDemux base class:
+
+* Buffering is handled inside the adaptive streaming element, based on
+  elementary streams (i.e. de-multiplexed from the downloaded fragments) and
+  stored inside the `adaptivedemux`-based element.
+
+* The download strategy has full visibility on bitrates, bandwidth, per-stream
+  queueing level (in time and bytes), playback position, etc. This opens up the
+  possibility of much more intelligent adaptive download strategies.
+
+* Output pads are not handled directly by the subclasses. Instead, subclasses
+  specify which `tracks` of elementary streams they can provide and which
+  "download streams" can provide content for those tracks. The baseclass
+  handles usage and activation of the `tracks` based on application
+  `select-streams` requests, and activation of the `stream` needed to feed each
+  selected `track`.
+
+* Output is done from a single thread, with the various elementary stream
+  packets being output in time order (i.e. behaving like a regular demuxer, with
+  interleaving reduced to its minimum). There is minimal buffering downstream
+  in the pipeline - only the amount required to decode and display.
+
+* The adaptive streaming element only exposes `src` pads for the selected
+  `GstStream`s. Typically, there will be one video track, one audio track and
+  perhaps one subtitle track exposed at a time, for example.
+
+* Stream selection is handled by the element directly. When switching on a
+  new media stream, the output to the relevant source pad is switched once
+  there is enough content buffered on the newly requested stream,
+  providing rapid and seamless stream switching.
+
+* Only 3 threads are used regardless of the number of streams/tracks. One is
+  dedicated to download, one for output, and one for scheduling and feeding
+  contents to the tracks.
+
+
+The main components of the new adaptive demuxers are:
+
+* `GstAdaptiveDemuxTrack` : end-user meaningful elementary streams. Those can be
+  selected by the user. They are provided by the subclasses based on the
+  manifest.
+  
+  * They each correspond to a `GstStream` of a `GstStreamCollection`
+  * They are unique by `GstStreamType` and any other unique identifier specified
+    by the manifest (ex: language)
+  * The caps *can* change through time
+
+* `OutputSlot` : A track being exposed via a source pad. This is handled by the
+  parent class.
+
+* `GstAdaptiveDemuxStream` : implementation-specific download stream. This is
+  linked to one or more `GstAdaptiveDemuxTrack`. The contents of that stream
+  will be parsed (via `parsebin`) and fed to the target tracks.
+  
+  * Which tracks are provided by a given `GstAdaptiveDemuxStream` is specified
+    by the subclass, but can also be discovered at runtime if the manifest did
+    not provide enough information (for example with HLS).
+
+* Download thread : Receives download requests from the scheduling thread that
+  can be queried and interrupted. Performs all download servicing in a
+  single dedicated thread that can estimate download bandwidth across all
+  simultaneous requests.
+
+* Scheduling thread : In charge of deciding what new downloads should be started
+  based on overall position, track buffering levels, selected tracks, current
+  time, etc. It is also in charge of handling completed downloads. Fragment
+  downloads are sent to dedicated `parsebin` elements that feed the parsed
+  elementary data to the target `GstAdaptiveDemuxTrack`s.
+
+* Output thread : In charge of deciding which track should be
+  outputted/removed/switched (via `OutputSlot`) based on requested selection and
+  track levels. 
+
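+A rough sketch of how these pieces interact (illustrative only, not generated
+from the code):
+
+```
+ Scheduling thread --(download requests)--> Download thread
+ Scheduling thread <-(completed fragments)- Download thread
+ Scheduling thread --(fragment data)------> parsebin --> GstAdaptiveDemuxTrack(s)
+ Output thread     <-(buffered data)------- GstAdaptiveDemuxTrack(s)
+ Output thread     --(time-ordered data)--> OutputSlot(s) --> src pads
+```
+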
+
+## Track(s) and Stream(s)
+
+Adaptive Demuxers provide one or more *Tracks* of elementary streams. Each of
+them is unique in terms of:
+
+* Their type (audio, video, text, ...). Ex: `GST_STREAM_TYPE_AUDIO`
+* Optional: Their codec. Ex: `video/x-h264`
+* Optional: Their language. Ex: `GST_TAG_LANGUAGE : "fr"`
+* Optional: Their number of channels (ex: stereo vs 5.1). Ex:
+  `audio/x-vorbis,channels=2`
+* Any other feature which would make the stream "unique", either because of its
+  nature (ex: video angle) or because the manifest specifies it as "unique".
+
+But tracks can vary over time by:
+
+* bitrate
+* profile or level
+* resolution
+
+They correspond to what an end-user might want to select (i.e. will be exposed
+in a `GstStreamCollection`). They are each identified by a `stream_id` provided
+by the subclass.
+
+> **Note:** A manifest *can* specify that tracks that would normally be separate
+> based on the above rules (for example different codecs or channels) are
+> actually the same "end-user selectable stream" (i.e. track). In such case only
+> one track is provided and the nature of the elementary stream can change
+> through time.
+
+Adaptive Demuxer subclasses also need to provide one or more *Download Streams*
+(`GstAdaptiveDemuxStream`), which are the implementation-/manifest-specific
+"streams" that each feed one or more *Tracks*. Those streams can also vary over
+time by bitrate/profile/resolution/... but always target the same tracks.
+
+The downloaded data from each of those `GstAdaptiveDemuxStream` is fed to a
+`parsebin` element which will put the output in the associated
+`GstAdaptiveDemuxTrack`.
+
+The tracks have some buffering capability, handled by the baseclass.
+
+
+This separation allows the base-class to:
+
+* decide which download stream(s) should be (de)activated based on the current
+  track selection
+* decide when to (re)start download requests based on buffering levels,
+  positions and external actions
+* handle buffering, output and stream selection
+
+The subclass is responsible for deciding:
+
+* *which* download should be requested next for a given stream, based on the
+  current playback position, the provided encoded bitrates, estimates of
+  download bandwidth, buffering levels, etc.
+
+
+Subclasses can also decide, before passing the downloaded data over, to:
+
+* pre-parse specific headers (ex: ID3 and webvtt headers in HLS, MP4 fragment
+  position, etc.)
+
+* pre-parse actual content when a position estimate is needed (ex: HLS does not
+  provide accurate positioning of fragments in the overall timeline)
+
+* rewrite the content altogether (for example webvtt fragments which require
+  timing to be re-computed)
+
+
+## Timeline, position, playout
+
+Adaptive Demuxers decide what to download based on a *Timeline* made of one or
+more *Tracks*.
+
+The output of that *Timeline* is synchronized (each *Track* pushes downstream at
+more or less the same position in time). That position is the "Global Output
+Position".
+
+The *Timeline* should have sufficient data in each track to allow all tracks to
+be decoded and played back downstream without introducing stalls. It is the goal
+of the *Scheduling thread* of adaptive demuxers to determine which fragment of
+data to download and at which moment, in order for:
+
+* each track to have sufficient data for continuous playback downstream
+* the overall buffering to not exceed specified limits (in time and/or bytes)
+* the playback position not to stray too far from the live edge in the case of
+  live sources and low-latency scenarios.
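+
+As a minimal illustration of the per-track decision this implies (a sketch
+only, using the low/high watermark notions exposed as element properties; the
+function and parameter names are made up, and the actual scheduler logic is
+considerably more involved):
+
+```c
+#include <gst/gst.h>
+
+/* Should downloads for a track be running, given how much data is buffered
+ * ahead of the global output position? Simple hysteresis between the low and
+ * high watermarks. */
+static gboolean
+track_should_download (GstClockTime buffered_until,
+    GstClockTime output_position, GstClockTime low_watermark,
+    GstClockTime high_watermark, gboolean currently_downloading)
+{
+  GstClockTime level = buffered_until > output_position ?
+      buffered_until - output_position : 0;
+
+  if (level <= low_watermark)
+    return TRUE;                  /* below the low watermark: (re)start downloads */
+  if (level >= high_watermark)
+    return FALSE;                 /* above the high watermark: pause downloads */
+  return currently_downloading;   /* in between: keep the current state */
+}
+```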
+
+Which *Track* is selected on that *Timeline* is either:
+
+ * decided by the element (default choices)
+ * decided by the user (via `GST_EVENT_SELECT_STREAMS`)
+
+The goal of an Adaptive Demuxer is to establish *which* fragment to download and
+*when* based on:
+
+* The selected *Tracks*
+* The current *Timeline* output position
+* The current *Track* download position (i.e. how much is buffered)
+* The available bandwidth (calculated based on download speed)
+* The bitrate of each fragment stream provided
+* The current time (for live sources)
+
+In the future, an Adaptive Demuxer will be able to decide to discard a fragment
+if it estimates it can switch to a higher/lower variant in time to still satisfy
+the above requirements.
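+
+As a purely illustrative sketch of the kind of variant (bitrate) selection this
+design enables (assuming the advertised bitrates are sorted in ascending order,
+and using a safety factor in the spirit of the `bandwidth-target-ratio`
+property; the function and parameter names below are made up):
+
+```c
+#include <gst/gst.h>
+
+/* Pick the highest variant whose bitrate fits within the measured download
+ * bandwidth scaled by a safety ratio. Falls back to the lowest variant. */
+static guint
+pick_variant (const guint * bitrates, guint n_variants,
+    guint64 measured_bandwidth, gdouble target_ratio)
+{
+  guint64 budget = (guint64) (measured_bandwidth * target_ratio);
+  guint i, best = 0;
+
+  for (i = 0; i < n_variants; i++) {
+    if (bitrates[i] <= budget)
+      best = i;                 /* highest index that still fits */
+  }
+  return best;
+}
+```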
+
+
+## Download helper and thread
+
+Based on the above, each Adaptive Demuxer implementation specifies to a
+*Download Loop* which fragment to download next and when.
+
+Multiple downloads can be requested at the same time on that thread. It is the
+responsibility of the *Scheduling thread* to decide what to do when a download is
+completed.
+
+Since all downloads are done in a dedicated thread without any blocking, it can
+estimate current bandwidth and latency, which the element can use to switch
+variants and improve buffering strategy.
+
+> **Note**: Unlike the old design, the `libsoup` library is used directly for
+> downloading, and not via external GStreamer elements. In the future, this
+> could be made modular so that other HTTP libraries can be used instead.
+
+
+## Stream Selection
+
+When sending `GST_EVENT_STREAM_COLLECTION` downstream, the adaptive demuxer also
+specifies on the event that it can handle stream-selection. Downstream elements
+(e.g. `decodebin3`) won't attempt to do any selection but will
+handle/decode/expose all the streams provided by the adaptive demuxer (including
+streams that get added/removed at runtime).
+
+When handling a `GST_EVENT_SELECT_STREAMS`, the adaptive demuxer will:
+
+* mark the requested tracks as `selected` (and no-longer-requested ones as not
+  selected)
+* instruct the streams associated with no-longer-selected tracks to stop
+* set the current output position on streams associated with newly selected
+  tracks and instruct them to start
+* return
+
+The actual changes in output (because of a stream selection change) are done in
+the output thread:
+
+* If a track is no longer selected and there are no candidate replacement tracks
+  of the same type, the associated output/pad is removed and the track is
+  drained.
+
+* If a track is selected and doesn't have a candidate replacement slot of the
+  same type, a new output/pad is added for that track.
+
+* If a track is selected and has a candidate replacement slot, it will only be
+  switched if the track it is replacing is empty *OR* if it has enough
+  buffering that the switch can happen without re-triggering buffering.
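+
+From the application's point of view, selection works like with any other
+streams-aware element. A minimal sketch (the language filtering and the helper
+name are purely illustrative):
+
+```c
+#include <gst/gst.h>
+
+/* Upon receiving a GstStreamCollection, request all non-audio streams plus
+ * the French audio track (if the stream tags expose one). */
+static void
+select_french_audio (GstElement * pipeline, GstStreamCollection * collection)
+{
+  GList *selected = NULL;
+  guint i;
+
+  for (i = 0; i < gst_stream_collection_get_size (collection); i++) {
+    GstStream *stream = gst_stream_collection_get_stream (collection, i);
+    GstTagList *tags = gst_stream_get_tags (stream);
+    gchar *lang = NULL;
+
+    if (tags)
+      gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &lang);
+
+    if (gst_stream_get_stream_type (stream) != GST_STREAM_TYPE_AUDIO
+        || g_strcmp0 (lang, "fr") == 0)
+      selected = g_list_append (selected,
+          (gchar *) gst_stream_get_stream_id (stream));
+
+    g_free (lang);
+    if (tags)
+      gst_tag_list_unref (tags);
+  }
+
+  /* The adaptive demuxer performs the switch itself, (de)activating the
+   * corresponding download streams and output pads. */
+  gst_element_send_event (pipeline, gst_event_new_select_streams (selected));
+  g_list_free (selected);
+}
+```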
+
+## Periods
+
+The number and type of `GstAdaptiveDemuxTrack` and `GstAdaptiveDemuxStream`
+cannot change once the initial manifests are parsed.
+
+In order to change that (for example in the case of a new DASH period), a new
+`GstAdaptiveDemuxPeriod` must be started.
+
+All the tracks and streams that are created at any given time are associated
+with the current `input period`. The streams of the input period are the ones
+that are active (i.e. downloading), and by extension the tracks of that input
+period are the ones that are being filled (if selected).
+
+That period *could* also be the `output period`. The (selected) tracks of that
+period are the ones that are used for output by the output thread.
+
+But due to buffering, the input and output period *could* be different; the
+baseclass will automatically handle the switch-over.
+
+The only requirement for subclasses is to ask the parent class to start a new
+period when needed and then create the new tracks and streams.
+
+
+## Responsibility split
+
+The `GstAdaptiveDemux2` base class is in charge of:
+
+* handling all downloads
+* parsing stream data (using `parsebin` and custom parsing functions)
+* providing *parsed* elementary content for each fragment (note: there can be
+  more than one output stream for a given fragment)
+* providing the `Tracks` that can be filled by subclasses
+* dealing with stream selection and output, including notifying subclasses
+  which of those *are* active or not
+* handling buffering and deciding when to request new data from the associated
+  streams
+
+Subclasses are in charge of:
+
+* specifying which `GstAdaptiveDemuxTrack` and `GstAdaptiveDemuxStream` they
+  provide (based on the manifest) and their relationship.
+* when requested by the base class, specifying which `GstAdaptiveDemuxFragment`
+  should be downloaded next for a given (selected) stream.
+
+
index 602f08e..4552b0d 100644 (file)
         "tracers": {},
         "url": "Unknown package origin"
     },
+    "adaptivedemux2": {
+        "description": "Adaptive Streaming 2 plugin",
+        "elements": {
+            "dashdemux2": {
+                "author": "Edward Hervey <edward@centricular.com>\nJan Schmidt <jan@centricular.com>",
+                "description": "Dynamic Adaptive Streaming over HTTP demuxer",
+                "hierarchy": [
+                    "GstDashDemux2",
+                    "GstAdaptiveDemux2",
+                    "GstBin",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "interfaces": [
+                    "GstChildProxy"
+                ],
+                "klass": "Codec/Demuxer/Adaptive",
+                "long-name": "DASH Demuxer",
+                "pad-templates": {
+                    "audio_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "sink": {
+                        "caps": "application/dash+xml:\n",
+                        "direction": "sink",
+                        "presence": "always"
+                    },
+                    "subtitle_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "video_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    }
+                },
+                "properties": {
+                    "max-bitrate": {
+                        "blurb": "Max of bitrate supported by target video decoder (0 = no maximum)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "max-video-framerate": {
+                        "blurb": "Max video framerate to select (0/1 = no maximum)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0/1",
+                        "max": "2147483647/1",
+                        "min": "0/1",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "GstFraction",
+                        "writable": true
+                    },
+                    "max-video-height": {
+                        "blurb": "Max video height to select (0 = no maximum)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "max-video-width": {
+                        "blurb": "Max video width to select (0 = no maximum)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "presentation-delay": {
+                        "blurb": "Default presentation delay (in seconds, milliseconds or fragments) (e.g. 12s, 2500ms, 3f)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "10s",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gchararray",
+                        "writable": true
+                    }
+                },
+                "rank": "primary + 1"
+            },
+            "hlsdemux2": {
+                "author": "Edward Hervey <edward@centricular.com>\nJan Schmidt <jan@centricular.com>",
+                "description": "HTTP Live Streaming demuxer",
+                "hierarchy": [
+                    "GstHLSDemux2",
+                    "GstAdaptiveDemux2",
+                    "GstBin",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "interfaces": [
+                    "GstChildProxy"
+                ],
+                "klass": "Codec/Demuxer/Adaptive",
+                "long-name": "HLS Demuxer",
+                "pad-templates": {
+                    "audio_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "sink": {
+                        "caps": "application/x-hls:\n",
+                        "direction": "sink",
+                        "presence": "always"
+                    },
+                    "subtitle_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "video_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    }
+                },
+                "properties": {
+                    "start-bitrate": {
+                        "blurb": "Initial bitrate to use to choose first alternate (0 = automatic) (bits/s)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    }
+                },
+                "rank": "primary + 1"
+            },
+            "mssdemux2": {
+                "author": "Thiago Santos <thiago.sousa.santos@collabora.com>",
+                "description": "Parse and demultiplex a Smooth Streaming manifest into audio and video streams",
+                "hierarchy": [
+                    "GstMssDemux2",
+                    "GstAdaptiveDemux2",
+                    "GstBin",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "interfaces": [
+                    "GstChildProxy"
+                ],
+                "klass": "Codec/Demuxer/Adaptive",
+                "long-name": "Smooth Streaming demuxer (v2)",
+                "pad-templates": {
+                    "audio_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "sink": {
+                        "caps": "application/vnd.ms-sstr+xml:\n",
+                        "direction": "sink",
+                        "presence": "always"
+                    },
+                    "subtitle_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    },
+                    "video_%%02u": {
+                        "caps": "ANY",
+                        "direction": "src",
+                        "presence": "sometimes"
+                    }
+                },
+                "rank": "primary + 1"
+            }
+        },
+        "filename": "gstadaptivedemux2",
+        "license": "LGPL",
+        "other-types": {
+            "GstAdaptiveDemux2": {
+                "hierarchy": [
+                    "GstAdaptiveDemux2",
+                    "GstBin",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "interfaces": [
+                    "GstChildProxy"
+                ],
+                "kind": "object",
+                "properties": {
+                    "bandwidth-target-ratio": {
+                        "blurb": "Limit of the available bitrate to use when switching to alternates",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0.8",
+                        "max": "1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gfloat",
+                        "writable": true
+                    },
+                    "connection-bitrate": {
+                        "blurb": "Network connection speed to use (0 = automatic) (bits/s)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "connection-speed": {
+                        "blurb": "Network connection speed to use in kbps (0 = calculate from downloaded fragments)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "4294967",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "current-bandwidth": {
+                        "blurb": "Report of current download bandwidth (based on arriving data) (bits/s)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": false
+                    },
+                    "current-level-time-audio": {
+                        "blurb": "Currently buffered level of audio track(s) (ns)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "18446744073709551615",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "guint64",
+                        "writable": false
+                    },
+                    "current-level-time-video": {
+                        "blurb": "Currently buffered level of video track(s) (ns)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "18446744073709551615",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "guint64",
+                        "writable": false
+                    },
+                    "high-watermark-fragments": {
+                        "blurb": "High watermark for parsed data above which downloads are paused (in fragments, 0=disable)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "3.40282e+38",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "gdouble",
+                        "writable": true
+                    },
+                    "high-watermark-time": {
+                        "blurb": "High watermark for parsed data above which downloads are paused (in ns, 0=disable)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "30000000000",
+                        "max": "18446744073709551615",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "guint64",
+                        "writable": true
+                    },
+                    "low-watermark-fragments": {
+                        "blurb": "Low watermark for parsed data below which downloads are resumed (in fragments, 0=disable)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "3.40282e+38",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "gdouble",
+                        "writable": true
+                    },
+                    "low-watermark-time": {
+                        "blurb": "Low watermark for parsed data below which downloads are resumed (in ns, 0=disable)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "3000000000",
+                        "max": "18446744073709551615",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "guint64",
+                        "writable": true
+                    },
+                    "max-bitrate": {
+                        "blurb": "Maximum bitrate to use when switching to alternates (bits/s)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    },
+                    "max-buffering-time": {
+                        "blurb": "Upper limit on the high watermark for parsed data, above which downloads are paused (in ns, 0=disable)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "30000000000",
+                        "max": "18446744073709551615",
+                        "min": "0",
+                        "mutable": "playing",
+                        "readable": true,
+                        "type": "guint64",
+                        "writable": true
+                    },
+                    "min-bitrate": {
+                        "blurb": "Minimum bitrate to use when switching to alternates (bits/s)",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "-1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "guint",
+                        "writable": true
+                    }
+                }
+            }
+        },
+        "package": "GStreamer Good Plug-ins",
+        "source": "gst-plugins-good",
+        "tracers": {},
+        "url": "Unknown package origin"
+    },
     "alaw": {
         "description": "ALaw audio conversion routines",
         "elements": {
index 83c3a27..6480e4e 100644 (file)
@@ -95,6 +95,7 @@ foreach plugin_name: list_plugin_res.stdout().split(':')
       gst_c_sources: [
         '../sys/*/*.[cmh]',
         '../ext/*/*.[ch]',
+        '../ext/*/*/*.[ch]',
         '../gst/*/*.[ch]',
       ],
       gst_c_source_filters: excludes,
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdash_debug.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdash_debug.h
new file mode 100644 (file)
index 0000000..c358e0c
--- /dev/null
@@ -0,0 +1,13 @@
+#ifndef __GST_DASH_DEBUG_H__
+#define __GST_DASH_DEBUG_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+GST_DEBUG_CATEGORY_EXTERN (gst_dash_demux2_debug);
+
+G_END_DECLS
+
+#endif /* __GST_DASH_DEBUG_H__ */
+
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.c
new file mode 100644 (file)
index 0000000..c142d2b
--- /dev/null
@@ -0,0 +1,3969 @@
+/*
+ * DASH demux plugin for GStreamer
+ *
+ * gstdashdemux.c
+ *
+ * Copyright (C) 2012 Orange
+ *
+ * Authors:
+ *   David Corvoysier <david.corvoysier@orange.com>
+ *   Hamid Zakari <hamid.zakari@gmail.com>
+ *
+ * Copyright (C) 2013 Smart TV Alliance
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/**
+ * SECTION:element-dashdemux2
+ * @title: dashdemux2
+ *
+ * DASH demuxer element.
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 playbin3 uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd"
+ * ]|
+ */
+
+/* Implementation notes:
+ *
+ * The following section describes how dashdemux works internally.
+ *
+ * Introduction:
+ *
+ * dashdemux is a "fake" demux, as unlike traditional demux elements, it
+ * doesn't split data streams contained in an envelope to expose them to
+ * downstream decoding elements.
+ *
+ * Instead, it parses an XML file called a manifest to identify a set of
+ * individual stream fragments it needs to fetch and expose to the actual demux
+ * elements (handled by the base `adaptivedemux2` class) that will handle them.
+ *
+ * For a given section of content, several representations corresponding
+ * to different bitrates may be available: dashdemux will select the most
+ * appropriate representation based on local conditions (typically the
+ * available bandwidth and the amount of buffering available, capped by
+ * a maximum allowed bitrate).
+ *
+ * The representation selection algorithm can be configured using
+ * specific properties: max bitrate, min/max buffering, bandwidth ratio.
+ *
+ *
+ * General Design:
+ *
+ * dashdemux will be provided with the data corresponding to the manifest,
+ * typically fetched from an HTTP or file source.
+ *
+ * dashdemux exposes the streams it recreates based on the fragments it fetches
+ * through dedicated GstAdaptiveDemux2Stream (corresponding to download streams).
+ * It also specifies the characteristics of the "elementary streams" provided by
+ * those "download streams" via "tracks" (GstAdaptiveDemuxTrack).
+ *
+ * During playback, new representations will typically be exposed as a
+ * new set of pads (see 'Switching between representations' below).
+ *
+ * Fragments downloading is performed using a dedicated task that fills
+ * an internal queue. Another task is in charge of popping fragments
+ * from the queue and pushing them downstream.
+ *
+ * Switching between representations:
+ *
+ * Decodebin supports scenarios allowing seamless switching from one
+ * stream to another inside the same "decoding chain".
+ *
+ * To achieve that, it combines the elements it autoplugged in chains
+ *  and groups, allowing only one decoding group to be active at a given
+ * time for a given chain.
+ *
+ * A chain can signal decodebin that it is complete by sending a
+ * no-more-pads event, but even after that new pads can be added to
+ * create new subgroups, provided that a new no-more-pads event is sent.
+ *
+ * We take advantage of that to dynamically create a new decoding group
+ * in order to select a different representation during playback.
+ *
+ * Typically, assuming that each fragment contains both audio and video,
+ * the following tree would be created:
+ *
+ * chain "DASH Demux"
+ * |_ group "Representation set 1"
+ * |   |_ chain "Qt Demux 0"
+ * |       |_ group "Stream 0"
+ * |           |_ chain "H264"
+ * |           |_ chain "AAC"
+ * |_ group "Representation set 2"
+ *     |_ chain "Qt Demux 1"
+ *         |_ group "Stream 1"
+ *             |_ chain "H264"
+ *             |_ chain "AAC"
+ *
+ * Or, if audio and video are contained in separate fragments:
+ *
+ * chain "DASH Demux"
+ * |_ group "Representation set 1"
+ * |   |_ chain "Qt Demux 0"
+ * |   |   |_ group "Stream 0"
+ * |   |       |_ chain "H264"
+ * |   |_ chain "Qt Demux 1"
+ * |       |_ group "Stream 1"
+ * |           |_ chain "AAC"
+ * |_ group "Representation set 2"
+ *     |_ chain "Qt Demux 3"
+ *     |   |_ group "Stream 2"
+ *     |       |_ chain "H264"
+ *     |_ chain "Qt Demux 4"
+ *         |_ group "Stream 3"
+ *             |_ chain "AAC"
+ *
+ * In both cases, when switching from Set 1 to Set 2 an EOS is sent on
+ * each end pad corresponding to Rep 0, triggering the "drain" state to
+ * propagate upstream.
+ * Once both EOS have been processed, the "Set 1" group is completely
+ * drained, and decodebin2 will switch to the "Set 2" group.
+ *
+ * Note: nothing can be pushed to the new decoding group before the
+ * old one has been drained, which means that in order to be able to
+ * adapt quickly to bandwidth changes, we will not be able to rely
+ * on downstream buffering, and will instead manage an internal queue.
+ *
+ *
+ * Keyframe trick-mode implementation:
+ *
+ * When requested (with GST_SEEK_FLAG_TRICKMODE_KEY_UNIT) and if the format
+ * is supported (ISOBMFF profiles), dashdemux can download only keyframes
+ * in order to provide fast forward/reverse playback without exceeding the
+ * available bandwidth/cpu/memory usage.
+ *
+ * This is done in two parts:
+ * 1) Parsing ISOBMFF atoms to detect the location of keyframes and only
+ *    download/push those.
+ * 2) Deciding what the ideal next keyframe to download is in order to
+ *    provide as many keyframes as possible without rebuffering.
+ *
+ * * Keyframe-only downloads:
+ *
+ * For each beginning of fragment, the fragment header will be parsed in
+ * gst_dash_demux_parse_isobmff() and then the information (offset, pts...)
+ * of each keyframe will be stored in moof_sync_samples.
+ *
+ * gst_dash_demux_stream_update_fragment_info() will specify the range
+ * start and end of the current keyframe, which will cause GstAdaptiveDemux
+ * to do a new upstream range request.
+ *
+ * When advancing, if there are still some keyframes in the current
+ * fragment, gst_dash_demux_stream_advance_fragment() will call
+ * gst_dash_demux_stream_advance_sync_sample() which decides what the next
+ * keyframe to get will be (it can be in reverse order for example, or
+ * might not be the *next* keyframe but one further as explained below).
+ *
+ * If no more keyframes are available in the current fragment, dash will
+ * advance to the next fragment (just like in the normal case) or to a
+ * fragment much further away (as explained below).
+ *
+ *
+ * * Deciding the optimal "next" keyframe/fragment to download:
+ *
+ * The main reason for doing keyframe-only downloads is for trick-modes
+ * (i.e. being able to do fast reverse/forward playback with limited
+ * bandwidth/cpu/memory).
+ *
+ * Downloading all keyframes might not be the optimal solution, especially
+ * at high playback rates, since the time taken to download the keyframe
+ * might exceed the available running time between two displayed frames
+ * (i.e. all frames would end up arriving late). This would cause severe
+ * rebuffering.
+ *
+ * Note: The values specified below can be in either the segment running
+ * time or in absolute values. Where position values need to be converted
+ * to segment running time the "running_time(val)" notation is used, and
+ * where running time needs to be converted to segment position the
+ * "position(val)" notation is used.
+ *
+ * The goal instead is to be able to download/display as many frames as
+ * possible for a given playback rate. For that the implementation will
+ * take into account:
+ *  * The requested playback rate and segment
+ *  * The average time to request and download a keyframe (in running time)
+ *  * The current position of dashdemux in the stream
+ *  * The current downstream (i.e. sink) position (in running time)
+ *
+ * To reach this goal we consider that there is some amount of buffering
+ * (in time) between dashdemux and the display sink. While we do not know
+ * the exact amount of buffering available, a safe and reasonable assumption
+ * is that there is at least a second (in running time).
+ *
+ * The average time to request and fully download a keyframe (with or
+ * without fragment header) is obtained by averaging the
+ * GstAdaptiveDemux2Stream->last_download_time and is stored in
+ * GstDashDemux2Stream->average_download_time. Those values include the
+ * network latency and full download time, which are more interesting and
+ * correct than just bitrates (with small download sizes, the impact of the
+ * network latency is much higher).
+ *
+ * The current position is calculated based on the fragment timestamp and
+ * the current keyframe index within that fragment. It is stored in
+ * GstDashDemux2Stream->actual_position.
+ *
+ * The downstream position of the pipeline is obtained via QoS events and
+ * is stored in GstAdaptiveDemux (note: it's a running time value).
+ *
+ * The estimated buffering level between dashdemux and downstream is
+ * therefore:
+ *   buffering_level = running_time(actual_position) - qos_earliest_time
+ *
+ * In order to avoid rebuffering, we want to ensure that the next keyframe
+ * (including potential fragment header) we request will be downloaded, demuxed
+ * and decoded in time so that it is not late. That next keyframe time is
+ * called the "target_time" and is calculated whenever we have finished
+ * pushing a keyframe downstream.
+ *
+ * One simple observation at this point is that we *need* to make sure that
+ * the target time is chosen such that:
+ *   running_time(target_time) > qos_earliest_time + average_download_time
+ *
+ * i.e. we choose a target time which will be greater than the time at which
+ * downstream will be once we request and download the keyframe (otherwise
+ * we're guaranteed to be late).
+ *
+ * This would provide the highest number of displayed frames per
+ * second, but it is just a *minimal* value and is not enough as-is,
+ * since it doesn't take into account the following items which could
+ * cause frames to arrive late (and therefore rebuffering):
+ * * Network jitter (i.e. by how much the download time can fluctuate)
+ * * Network stalling
+ * * Different keyframe sizes (and therefore download time)
+ * * Decoding speed
+ *
+ * Instead, we adjust the target time calculation based on the
+ * buffering_level.
+ *
+ * The smaller the buffering level is (i.e. the closer we are between
+ * current and downstream), the more aggressively we skip forward (and
+ * guarantee the keyframe will be downloaded, decoded and displayed in
+ * time). And the higher the buffering level, the less aggressively
+ * we need to skip forward (and therefore display more frames per
+ * second).
+ *
+ * Right now the threshold for aggressive switching is set to 3
+ * average_download_time. Below that buffering level we set the target time
+ * to at least 3 average_download_time distance beyond the
+ * qos_earliest_time.
+ *
+ * If we are above that buffering level we set the target time to:
+ *      position(running_time(position) + average_download_time)
+ *
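+ * As a purely illustrative example (numbers not taken from the code): with
+ * average_download_time = 200ms the aggressive threshold is a buffering
+ * level of 600ms. If qos_earliest_time = 10s and the buffering level is only
+ * 400ms, we aim for a target time of at least position(10s + 600ms); with a
+ * comfortable buffering level of, say, 2s, we instead simply aim for
+ * position(running_time(position) + 200ms).
+ *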
+ * The logic is therefore:
+ * WHILE(!EOS)
+ *   Calculate target_time
+ *   Advance to keyframe/fragment for that target_time
+ *   Adaptivedemux downloads that keyframe/fragment
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#  include "config.h"
+#endif
+
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <inttypes.h>
+#include <gio/gio.h>
+#include <gst/base/gsttypefindhelper.h>
+#include <gst/tag/tag.h>
+#include <gst/net/gstnet.h>
+
+#include "gstdashdemux.h"
+#include "gstdash_debug.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/dash+xml"));
+
+GST_DEBUG_CATEGORY (gst_dash_demux2_debug);
+#define GST_CAT_DEFAULT gst_dash_demux2_debug
+
+enum
+{
+  PROP_0,
+  PROP_MAX_VIDEO_WIDTH,
+  PROP_MAX_VIDEO_HEIGHT,
+  PROP_MAX_VIDEO_FRAMERATE,
+  PROP_PRESENTATION_DELAY,
+  PROP_LAST
+};
+
+/* Default values for properties */
+#define DEFAULT_MAX_VIDEO_WIDTH           0
+#define DEFAULT_MAX_VIDEO_HEIGHT          0
+#define DEFAULT_MAX_VIDEO_FRAMERATE_N     0
+#define DEFAULT_MAX_VIDEO_FRAMERATE_D     1
+#define DEFAULT_PRESENTATION_DELAY     "10s"    /* 10s */
+
+/* Clock drift compensation for live streams */
+#define SLOW_CLOCK_UPDATE_INTERVAL  (1000000 * 30 * 60) /* 30 minutes */
+#define FAST_CLOCK_UPDATE_INTERVAL  (1000000 * 30)      /* 30 seconds */
+#define SUPPORTED_CLOCK_FORMATS (GST_MPD_UTCTIMING_TYPE_NTP | GST_MPD_UTCTIMING_TYPE_HTTP_HEAD | GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE | GST_MPD_UTCTIMING_TYPE_HTTP_ISO | GST_MPD_UTCTIMING_TYPE_HTTP_NTP)
+#define NTP_TO_UNIX_EPOCH G_GUINT64_CONSTANT(2208988800)        /* difference (in seconds) between NTP epoch and Unix epoch */
+
+struct _GstDashDemux2ClockDrift
+{
+  GMutex clock_lock;            /* used to protect access to struct */
+  GstMPDUTCTimingType method;
+  guint selected_url;
+  gint64 next_update;
+  /* @clock_compensation: amount (in usecs) to add to client's idea of
+     now to map it to the server's idea of now */
+  GTimeSpan clock_compensation;
+  GstClock *ntp_clock;
+};
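+
+/* Illustration of the @clock_compensation field above (assumed usage; the
+ * actual application is in gst_dash_demux_get_server_now_utc()): if the
+ * server clock is measured to be 1.5 seconds ahead of the local clock,
+ * clock_compensation is 1500000 usecs and the server's idea of "now" is
+ * obtained as client_now + clock_compensation. */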
+
+typedef struct
+{
+  guint64 start_offset, end_offset;
+  /* TODO: Timestamp and duration */
+} GstDashStreamSyncSample;
+
+/* GObject */
+static void gst_dash_demux_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_dash_demux_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+static void gst_dash_demux_dispose (GObject * obj);
+
+/* GstAdaptiveDemux */
+static GstClockTime gst_dash_demux_get_duration (GstAdaptiveDemux * ademux);
+static gboolean gst_dash_demux_is_live (GstAdaptiveDemux * ademux);
+static void gst_dash_demux_reset (GstAdaptiveDemux * ademux);
+static gboolean gst_dash_demux_process_manifest (GstAdaptiveDemux * ademux,
+    GstBuffer * buf);
+static gboolean gst_dash_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek);
+static GstFlowReturn
+gst_dash_demux_stream_update_fragment_info (GstAdaptiveDemux2Stream * stream);
+static GstFlowReturn gst_dash_demux_stream_seek (GstAdaptiveDemux2Stream *
+    stream, gboolean forward, GstSeekFlags flags, GstClockTimeDiff ts,
+    GstClockTimeDiff * final_ts);
+static gboolean gst_dash_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream
+    * stream);
+static GstFlowReturn
+gst_dash_demux_stream_advance_fragment (GstAdaptiveDemux2Stream * stream);
+static gboolean
+gst_dash_demux_stream_advance_subfragment (GstAdaptiveDemux2Stream * stream);
+static gboolean gst_dash_demux_stream_select_bitrate (GstAdaptiveDemux2Stream *
+    stream, guint64 bitrate);
+static gint64 gst_dash_demux_get_manifest_update_interval (GstAdaptiveDemux *
+    demux);
+static GstFlowReturn gst_dash_demux_update_manifest_data (GstAdaptiveDemux *
+    demux, GstBuffer * buf);
+static GstClockTime
+gst_dash_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux2Stream *
+    stream);
+static void gst_dash_demux_advance_period (GstAdaptiveDemux * demux);
+static gboolean gst_dash_demux_has_next_period (GstAdaptiveDemux * demux);
+static GstFlowReturn gst_dash_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer);
+static gboolean
+gst_dash_demux_stream_fragment_start (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static GstFlowReturn
+gst_dash_demux_stream_fragment_finished (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static gboolean gst_dash_demux_need_another_chunk (GstAdaptiveDemux2Stream *
+    stream);
+
+/* GstDashDemux2 */
+static gboolean gst_dash_demux_setup_all_streams (GstDashDemux2 * demux);
+
+static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux2 * demux,
+    GstActiveStream * stream);
+static GstDashDemux2ClockDrift *gst_dash_demux_clock_drift_new (GstDashDemux2 *
+    demux);
+static void gst_dash_demux_clock_drift_free (GstDashDemux2ClockDrift *);
+static void gst_dash_demux_poll_clock_drift (GstDashDemux2 * demux);
+static GTimeSpan gst_dash_demux_get_clock_compensation (GstDashDemux2 * demux);
+static GDateTime *gst_dash_demux_get_server_now_utc (GstDashDemux2 * demux);
+
+#define SIDX(s) (&(s)->sidx_parser.sidx)
+
+static inline GstSidxBoxEntry *
+SIDX_ENTRY (GstDashDemux2Stream * s, gint i)
+{
+  g_assert (i < SIDX (s)->entries_count);
+  return &(SIDX (s)->entries[(i)]);
+}
+
+#define SIDX_CURRENT_ENTRY(s) SIDX_ENTRY(s, SIDX(s)->entry_index)
+
+static void gst_dash_demux_send_content_protection_event (gpointer cp_data,
+    gpointer stream);
+
+#define gst_dash_demux_stream_parent_class stream_parent_class
+G_DEFINE_TYPE (GstDashDemux2Stream, gst_dash_demux_stream,
+    GST_TYPE_ADAPTIVE_DEMUX2_STREAM);
+
+static void
+gst_dash_demux_stream_init (GstDashDemux2Stream * stream)
+{
+  stream->adapter = gst_adapter_new ();
+  stream->pending_seek_ts = GST_CLOCK_TIME_NONE;
+  stream->sidx_position = GST_CLOCK_TIME_NONE;
+  stream->actual_position = GST_CLOCK_TIME_NONE;
+  stream->target_time = GST_CLOCK_TIME_NONE;
+
+  stream->first_sync_sample_always_after_moof = TRUE;
+
+  /* Set a default average keyframe download time of a quarter of a second */
+  stream->average_download_time = 250 * GST_MSECOND;
+
+  gst_isoff_sidx_parser_init (&stream->sidx_parser);
+}
+
+static void
+gst_dash_demux_stream_finalize (GObject * object)
+{
+  GstDashDemux2Stream *dash_stream = (GstDashDemux2Stream *) object;
+  if (dash_stream->track) {
+    gst_adaptive_demux_track_unref (dash_stream->track);
+    dash_stream->track = NULL;
+  }
+
+  gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
+  if (dash_stream->adapter)
+    g_object_unref (dash_stream->adapter);
+  if (dash_stream->moof)
+    gst_isoff_moof_box_free (dash_stream->moof);
+  if (dash_stream->moof_sync_samples)
+    g_array_free (dash_stream->moof_sync_samples, TRUE);
+
+  G_OBJECT_CLASS (stream_parent_class)->finalize (object);
+}
+
+static void
+gst_dash_demux_stream_class_init (GstDashDemux2StreamClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->finalize = gst_dash_demux_stream_finalize;
+}
+
+
+#define gst_dash_demux2_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstDashDemux2, gst_dash_demux2,
+    GST_TYPE_ADAPTIVE_DEMUX, GST_DEBUG_CATEGORY_INIT (gst_dash_demux2_debug,
+        "dashdemux2", 0, "dashdemux2 element")
+    );
+
+GST_ELEMENT_REGISTER_DEFINE (dashdemux2, "dashdemux2",
+    GST_RANK_PRIMARY + 1, GST_TYPE_DASH_DEMUX2);
+
+static void
+gst_dash_demux_dispose (GObject * obj)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX (obj);
+
+  gst_dash_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+
+  if (demux->client) {
+    gst_mpd_client2_free (demux->client);
+    demux->client = NULL;
+  }
+
+  g_mutex_clear (&demux->client_lock);
+
+  gst_dash_demux_clock_drift_free (demux->clock_drift);
+  demux->clock_drift = NULL;
+  g_free (demux->default_presentation_delay);
+  G_OBJECT_CLASS (parent_class)->dispose (obj);
+}
+
+static gboolean
+gst_dash_demux_get_live_seek_range (GstAdaptiveDemux * demux, gint64 * start,
+    gint64 * stop)
+{
+  GstDashDemux2 *self = GST_DASH_DEMUX (demux);
+  GDateTime *now;
+  GDateTime *mstart;
+  GTimeSpan stream_now;
+  GstClockTime seg_duration;
+
+  if (self->client->mpd_root_node->availabilityStartTime == NULL)
+    return FALSE;
+
+  seg_duration = gst_mpd_client2_get_maximum_segment_duration (self->client);
+  now = gst_dash_demux_get_server_now_utc (self);
+  mstart =
+      gst_date_time_to_g_date_time (self->client->mpd_root_node->
+      availabilityStartTime);
+  stream_now = g_date_time_difference (now, mstart);
+  g_date_time_unref (now);
+  g_date_time_unref (mstart);
+
+  if (stream_now <= 0)
+    return FALSE;
+
+  *stop = stream_now * GST_USECOND;
+  if (self->client->mpd_root_node->timeShiftBufferDepth ==
+      GST_MPD_DURATION_NONE) {
+    *start = 0;
+  } else {
+    *start =
+        *stop -
+        (self->client->mpd_root_node->timeShiftBufferDepth * GST_MSECOND);
+    if (*start < 0)
+      *start = 0;
+  }
+
+  /* As defined in 5.3.9.5.3 of the DASH specification, a segment does
+     not become available until the sum of:
+     * the value of the MPD@availabilityStartTime,
+     * the PeriodStart time of the containing Period
+     * the MPD start time of the Media Segment, and
+     * the MPD duration of the Media Segment.
+     Therefore we need to subtract the media segment duration from the stop
+     time.
+   */
+  *stop -= seg_duration;
+  return TRUE;
+}
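+
+/* Worked example for the live seek range above (illustrative numbers): with
+ * an availabilityStartTime 100s in the past, a timeShiftBufferDepth of 30s
+ * and a maximum segment duration of 6s, the reported range is
+ * start = 100s - 30s = 70s and stop = 100s - 6s = 94s. */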
+
+static GstClockTime
+gst_dash_demux_get_presentation_offset (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+  return gst_mpd_client2_get_stream_presentation_offset (dashdemux->client,
+      dashstream->index);
+}
+
+static GstClockTime
+gst_dash_demux_get_period_start_time (GstAdaptiveDemux * demux)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+  return gst_mpd_client2_get_period_start_time (dashdemux->client);
+}
+
+static void
+gst_dash_demux2_class_init (GstDashDemux2Class * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstAdaptiveDemuxClass *gstadaptivedemux_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstadaptivedemux_class = (GstAdaptiveDemuxClass *) klass;
+
+  gobject_class->set_property = gst_dash_demux_set_property;
+  gobject_class->get_property = gst_dash_demux_get_property;
+  gobject_class->dispose = gst_dash_demux_dispose;
+
+  g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_WIDTH,
+      g_param_spec_uint ("max-video-width", "Max video width",
+          "Max video width to select (0 = no maximum)",
+          0, G_MAXUINT, DEFAULT_MAX_VIDEO_WIDTH,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_HEIGHT,
+      g_param_spec_uint ("max-video-height", "Max video height",
+          "Max video height to select (0 = no maximum)",
+          0, G_MAXUINT, DEFAULT_MAX_VIDEO_HEIGHT,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_VIDEO_FRAMERATE,
+      gst_param_spec_fraction ("max-video-framerate", "Max video framerate",
+          "Max video framerate to select (0/1 = no maximum)",
+          0, 1, G_MAXINT, 1, DEFAULT_MAX_VIDEO_FRAMERATE_N,
+          DEFAULT_MAX_VIDEO_FRAMERATE_D,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PRESENTATION_DELAY,
+      g_param_spec_string ("presentation-delay", "Presentation delay",
+          "Default presentation delay (in seconds, milliseconds or fragments) (e.g. 12s, 2500ms, 3f)",
+          DEFAULT_PRESENTATION_DELAY,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "DASH Demuxer",
+      "Codec/Demuxer/Adaptive",
+      "Dynamic Adaptive Streaming over HTTP demuxer",
+      "Edward Hervey <edward@centricular.com>\n"
+      "Jan Schmidt <jan@centricular.com>");
+
+
+  gstadaptivedemux_class->get_duration = gst_dash_demux_get_duration;
+  gstadaptivedemux_class->is_live = gst_dash_demux_is_live;
+  gstadaptivedemux_class->reset = gst_dash_demux_reset;
+  gstadaptivedemux_class->seek = gst_dash_demux_seek;
+
+  gstadaptivedemux_class->process_manifest = gst_dash_demux_process_manifest;
+  gstadaptivedemux_class->update_manifest_data =
+      gst_dash_demux_update_manifest_data;
+  gstadaptivedemux_class->get_manifest_update_interval =
+      gst_dash_demux_get_manifest_update_interval;
+
+  gstadaptivedemux_class->has_next_period = gst_dash_demux_has_next_period;
+  gstadaptivedemux_class->advance_period = gst_dash_demux_advance_period;
+  gstadaptivedemux_class->stream_has_next_fragment =
+      gst_dash_demux_stream_has_next_fragment;
+  gstadaptivedemux_class->stream_advance_fragment =
+      gst_dash_demux_stream_advance_fragment;
+  gstadaptivedemux_class->stream_get_fragment_waiting_time =
+      gst_dash_demux_stream_get_fragment_waiting_time;
+  gstadaptivedemux_class->stream_seek = gst_dash_demux_stream_seek;
+  gstadaptivedemux_class->stream_select_bitrate =
+      gst_dash_demux_stream_select_bitrate;
+  gstadaptivedemux_class->stream_update_fragment_info =
+      gst_dash_demux_stream_update_fragment_info;
+  gstadaptivedemux_class->get_live_seek_range =
+      gst_dash_demux_get_live_seek_range;
+  gstadaptivedemux_class->get_presentation_offset =
+      gst_dash_demux_get_presentation_offset;
+  gstadaptivedemux_class->get_period_start_time =
+      gst_dash_demux_get_period_start_time;
+
+  gstadaptivedemux_class->start_fragment = gst_dash_demux_stream_fragment_start;
+  gstadaptivedemux_class->finish_fragment =
+      gst_dash_demux_stream_fragment_finished;
+  gstadaptivedemux_class->data_received = gst_dash_demux_data_received;
+  gstadaptivedemux_class->need_another_chunk =
+      gst_dash_demux_need_another_chunk;
+}
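+
+/* Example (illustrative, not part of this file) of configuring these
+ * properties from an application. "dashdemux2" is assumed to be the element
+ * instance, e.g. as handed out by playbin3's "element-setup" signal:
+ *
+ *   g_object_set (dashdemux2,
+ *       "max-video-height", 720,
+ *       "presentation-delay", "12s",
+ *       NULL);
+ */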
+
+static void
+gst_dash_demux2_init (GstDashDemux2 * demux)
+{
+  /* Properties */
+  demux->max_video_width = DEFAULT_MAX_VIDEO_WIDTH;
+  demux->max_video_height = DEFAULT_MAX_VIDEO_HEIGHT;
+  demux->max_video_framerate_n = DEFAULT_MAX_VIDEO_FRAMERATE_N;
+  demux->max_video_framerate_d = DEFAULT_MAX_VIDEO_FRAMERATE_D;
+  demux->default_presentation_delay = g_strdup (DEFAULT_PRESENTATION_DELAY);
+
+  g_mutex_init (&demux->client_lock);
+}
+
+static void
+gst_dash_demux_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_MAX_VIDEO_WIDTH:
+      demux->max_video_width = g_value_get_uint (value);
+      break;
+    case PROP_MAX_VIDEO_HEIGHT:
+      demux->max_video_height = g_value_get_uint (value);
+      break;
+    case PROP_MAX_VIDEO_FRAMERATE:
+      demux->max_video_framerate_n = gst_value_get_fraction_numerator (value);
+      demux->max_video_framerate_d = gst_value_get_fraction_denominator (value);
+      break;
+    case PROP_PRESENTATION_DELAY:
+      g_free (demux->default_presentation_delay);
+      demux->default_presentation_delay = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_dash_demux_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_MAX_VIDEO_WIDTH:
+      g_value_set_uint (value, demux->max_video_width);
+      break;
+    case PROP_MAX_VIDEO_HEIGHT:
+      g_value_set_uint (value, demux->max_video_height);
+      break;
+    case PROP_MAX_VIDEO_FRAMERATE:
+      gst_value_set_fraction (value, demux->max_video_framerate_n,
+          demux->max_video_framerate_d);
+      break;
+    case PROP_PRESENTATION_DELAY:
+      if (demux->default_presentation_delay == NULL)
+        g_value_set_static_string (value, "");
+      else
+        g_value_set_string (value, demux->default_presentation_delay);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static gboolean
+gst_dash_demux_setup_mpdparser_streams (GstDashDemux2 * demux,
+    GstMPDClient2 * client)
+{
+  gboolean has_streams = FALSE;
+  GList *adapt_sets, *iter;
+
+  adapt_sets = gst_mpd_client2_get_adaptation_sets (client);
+  for (iter = adapt_sets; iter; iter = g_list_next (iter)) {
+    GstMPDAdaptationSetNode *adapt_set_node = iter->data;
+
+    if (gst_mpd_client2_setup_streaming (client, adapt_set_node))
+      has_streams = TRUE;
+  }
+
+  if (!has_streams) {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, ("Manifest has no playable "
+            "streams"), ("No streams could be activated from the manifest"));
+  }
+  return has_streams;
+}
+
+static GstStreamType
+gst_dash_demux_get_stream_type (GstDashDemux2 * demux, GstActiveStream * stream)
+{
+  switch (stream->mimeType) {
+    case GST_STREAM_AUDIO:
+      return GST_STREAM_TYPE_AUDIO;
+    case GST_STREAM_VIDEO:
+      return GST_STREAM_TYPE_VIDEO;
+    case GST_STREAM_APPLICATION:
+      if (gst_mpd_client2_active_stream_contains_subtitles (stream))
+        return GST_STREAM_TYPE_TEXT;
+      /* Non-subtitle application streams are not handled */
+      return GST_STREAM_TYPE_UNKNOWN;
+    default:
+      g_assert_not_reached ();
+      return GST_STREAM_TYPE_UNKNOWN;
+  }
+}
+
+static GstDashDemux2Stream *
+gst_dash_demux_stream_new (guint period_num, gchar * stream_id)
+{
+  GstDashDemux2Stream *stream;
+  gchar *name =
+      g_strdup_printf ("dashstream-period%d-%s", period_num, stream_id);
+
+  stream = g_object_new (GST_TYPE_DASH_DEMUX_STREAM, "name", name, NULL);
+
+  g_free (name);
+
+  return stream;
+}
+
+static gboolean
+gst_dash_demux_setup_all_streams (GstDashDemux2 * demux)
+{
+  GstAdaptiveDemux *parent = (GstAdaptiveDemux *) demux;
+  guint i;
+
+  GST_DEBUG_OBJECT (demux, "Setting up streams for period %d",
+      gst_mpd_client2_get_period_index (demux->client));
+
+  /* clean old active stream list, if any */
+  gst_mpd_client2_active_streams_free (demux->client);
+
+  if (!gst_dash_demux_setup_mpdparser_streams (demux, demux->client)) {
+    return FALSE;
+  }
+
+  if (!gst_adaptive_demux_start_new_period (parent))
+    return FALSE;
+
+  GST_DEBUG_OBJECT (demux, "Creating stream objects");
+  for (i = 0; i < gst_mpd_client2_get_nb_active_stream (demux->client); i++) {
+    GstDashDemux2Stream *stream;
+    GstAdaptiveDemuxTrack *track;
+    GstStreamType streamtype;
+    GstActiveStream *active_stream;
+    GstCaps *caps, *codec_caps;
+    gchar *stream_id;
+    GstStructure *s;
+    gchar *lang = NULL;
+    GstTagList *tags = NULL;
+
+    active_stream =
+        gst_mpd_client2_get_active_stream_by_index (demux->client, i);
+    if (active_stream == NULL)
+      continue;
+
+#if 0
+    /* Porting note : No longer handled by subclasses */
+    if (demux->trickmode_no_audio
+        && active_stream->mimeType == GST_STREAM_AUDIO) {
+      GST_DEBUG_OBJECT (demux,
+          "Skipping audio stream %d because of TRICKMODE_NO_AUDIO flag", i);
+      continue;
+    }
+#endif
+
+    streamtype = gst_dash_demux_get_stream_type (demux, active_stream);
+    if (streamtype == GST_STREAM_TYPE_UNKNOWN)
+      continue;
+
+    stream_id =
+        g_strdup_printf ("%s-%d", gst_stream_type_get_name (streamtype), i);
+
+    caps = gst_dash_demux_get_input_caps (demux, active_stream);
+    codec_caps = gst_mpd_client2_get_codec_caps (active_stream);
+    GST_LOG_OBJECT (demux,
+        "Creating stream %d %" GST_PTR_FORMAT " / codec %" GST_PTR_FORMAT, i,
+        caps, codec_caps);
+
+    if (active_stream->cur_adapt_set) {
+      GstMPDAdaptationSetNode *adp_set = active_stream->cur_adapt_set;
+      lang = adp_set->lang;
+
+      /* Fallback to the language in ContentComponent node */
+      if (lang == NULL) {
+        GList *it;
+
+        for (it = adp_set->ContentComponents; it; it = it->next) {
+          GstMPDContentComponentNode *cc_node = it->data;
+          if (cc_node->lang) {
+            lang = cc_node->lang;
+            break;
+          }
+        }
+      }
+    }
+
+    if (lang) {
+      if (gst_tag_check_language_code (lang))
+        tags = gst_tag_list_new (GST_TAG_LANGUAGE_CODE, lang, NULL);
+      else
+        tags = gst_tag_list_new (GST_TAG_LANGUAGE_NAME, lang, NULL);
+    }
+
+    /* Create the track this stream provides */
+    track = gst_adaptive_demux_track_new (GST_ADAPTIVE_DEMUX_CAST (demux),
+        streamtype, GST_STREAM_FLAG_NONE, stream_id, codec_caps, tags);
+
+    stream = gst_dash_demux_stream_new (demux->client->period_idx, stream_id);
+    GST_ADAPTIVE_DEMUX2_STREAM_CAST (stream)->stream_type = streamtype;
+
+    g_free (stream_id);
+
+    gst_adaptive_demux2_add_stream (GST_ADAPTIVE_DEMUX_CAST (demux),
+        GST_ADAPTIVE_DEMUX2_STREAM_CAST (stream));
+    gst_adaptive_demux2_stream_add_track (GST_ADAPTIVE_DEMUX2_STREAM_CAST
+        (stream), track);
+    stream->track = track;
+    stream->active_stream = active_stream;
+    s = gst_caps_get_structure (caps, 0);
+    stream->allow_sidx =
+        gst_mpd_client2_has_isoff_ondemand_profile (demux->client);
+    stream->is_isobmff = gst_structure_has_name (s, "video/quicktime")
+        || gst_structure_has_name (s, "audio/x-m4a");
+    gst_adaptive_demux2_stream_set_caps (GST_ADAPTIVE_DEMUX2_STREAM_CAST
+        (stream), caps);
+    if (tags)
+      gst_adaptive_demux2_stream_set_tags (GST_ADAPTIVE_DEMUX2_STREAM_CAST
+          (stream), tags);
+    stream->index = i;
+
+    if (active_stream->cur_adapt_set &&
+        GST_MPD_REPRESENTATION_BASE_NODE (active_stream->
+            cur_adapt_set)->ContentProtection) {
+      GST_DEBUG_OBJECT (demux, "Adding ContentProtection events to source pad");
+      g_list_foreach (GST_MPD_REPRESENTATION_BASE_NODE
+          (active_stream->cur_adapt_set)->ContentProtection,
+          gst_dash_demux_send_content_protection_event, stream);
+    }
+  }
+
+  return TRUE;
+}
+
+static void
+gst_dash_demux_send_content_protection_event (gpointer data, gpointer userdata)
+{
+  GstMPDDescriptorTypeNode *cp = (GstMPDDescriptorTypeNode *) data;
+  GstDashDemux2Stream *stream = (GstDashDemux2Stream *) userdata;
+  GstAdaptiveDemux2Stream *bstream = (GstAdaptiveDemux2Stream *) userdata;
+  GstEvent *event;
+  GstBuffer *pssi;
+  glong pssi_len;
+  gchar *schemeIdUri;
+
+  if (cp->schemeIdUri == NULL)
+    return;
+
+  GST_TRACE_OBJECT (bstream, "check schemeIdUri %s", cp->schemeIdUri);
+  /* RFC 2141 states: The leading "urn:" sequence is case-insensitive */
+  schemeIdUri = g_ascii_strdown (cp->schemeIdUri, -1);
+  if (g_str_has_prefix (schemeIdUri, "urn:uuid:")) {
+    pssi_len = strlen (cp->value);
+    pssi = gst_buffer_new_wrapped (g_memdup2 (cp->value, pssi_len), pssi_len);
+    GST_LOG_OBJECT (bstream, "Queuing Protection event on source pad");
+    /* RFC 4122 states that the hex part of a UUID is in lower case,
+     * but some streams seem to ignore this and use upper case for the
+     * protection system ID */
+    event = gst_event_new_protection (cp->schemeIdUri + 9, pssi, "dash/mpd");
+    gst_adaptive_demux2_stream_queue_event ((GstAdaptiveDemux2Stream *) stream,
+        event);
+    gst_buffer_unref (pssi);
+  }
+  g_free (schemeIdUri);
+}
+
+static GstClockTime
+gst_dash_demux_get_duration (GstAdaptiveDemux * ademux)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX_CAST (ademux);
+
+  g_return_val_if_fail (demux->client != NULL, GST_CLOCK_TIME_NONE);
+
+  return gst_mpd_client2_get_media_presentation_duration (demux->client);
+}
+
+static gboolean
+gst_dash_demux_is_live (GstAdaptiveDemux * ademux)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX_CAST (ademux);
+
+  g_return_val_if_fail (demux->client != NULL, FALSE);
+
+  return gst_mpd_client2_is_live (demux->client);
+}
+
+static gboolean
+gst_dash_demux_setup_streams (GstAdaptiveDemux * demux)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  gboolean ret = TRUE;
+  GstDateTime *now = NULL;
+  guint period_idx;
+
+  /* setup video, audio and subtitle streams, starting from first Period if
+   * non-live */
+  period_idx = 0;
+  if (gst_mpd_client2_is_live (dashdemux->client)) {
+    GDateTime *g_now;
+    if (dashdemux->client->mpd_root_node->availabilityStartTime == NULL) {
+      ret = FALSE;
+      GST_ERROR_OBJECT (demux, "MPD does not have availabilityStartTime");
+      goto done;
+    }
+    if (dashdemux->clock_drift == NULL) {
+      gchar **urls;
+      urls =
+          gst_mpd_client2_get_utc_timing_sources (dashdemux->client,
+          SUPPORTED_CLOCK_FORMATS, NULL);
+      if (urls) {
+        GST_DEBUG_OBJECT (dashdemux, "Found a supported UTCTiming element");
+        dashdemux->clock_drift = gst_dash_demux_clock_drift_new (dashdemux);
+        gst_dash_demux_poll_clock_drift (dashdemux);
+      }
+    }
+    /* get period index for period encompassing the current time */
+    g_now = gst_dash_demux_get_server_now_utc (dashdemux);
+    now = gst_date_time_new_from_g_date_time (g_now);
+    if (dashdemux->client->mpd_root_node->suggestedPresentationDelay != -1) {
+      GstClockTimeDiff presentation_diff =
+          -dashdemux->client->mpd_root_node->suggestedPresentationDelay *
+          GST_MSECOND;
+      GstDateTime *target =
+          gst_mpd_client2_add_time_difference (now, presentation_diff);
+      gst_date_time_unref (now);
+      now = target;
+    } else if (dashdemux->default_presentation_delay) {
+      GstClockTimeDiff dfp =
+          gst_mpd_client2_parse_default_presentation_delay (dashdemux->client,
+          dashdemux->default_presentation_delay) * GST_MSECOND;
+      GstDateTime *target = gst_mpd_client2_add_time_difference (now, -dfp);
+      gst_date_time_unref (now);
+      now = target;
+    }
+    period_idx =
+        gst_mpd_client2_get_period_index_at_time (dashdemux->client, now);
+    if (period_idx == G_MAXUINT) {
+#ifndef GST_DISABLE_GST_DEBUG
+      gchar *date_str = gst_date_time_to_iso8601_string (now);
+      GST_DEBUG_OBJECT (demux, "Unable to find live period active at %s",
+          date_str);
+      g_free (date_str);
+#endif
+      ret = FALSE;
+      goto done;
+    }
+  }
+
+  if (!gst_mpd_client2_set_period_index (dashdemux->client, period_idx) ||
+      !gst_dash_demux_setup_all_streams (dashdemux)) {
+    ret = FALSE;
+    goto done;
+  }
+
+  /* If stream is live, try to find the segment that
+   * is closest to current time */
+  if (gst_mpd_client2_is_live (dashdemux->client)) {
+    GDateTime *gnow;
+
+    GST_DEBUG_OBJECT (demux, "Seeking to current time of day for live stream ");
+
+    gnow = gst_date_time_to_g_date_time (now);
+    gst_mpd_client2_seek_to_time (dashdemux->client, gnow);
+    g_date_time_unref (gnow);
+  } else {
+    GST_DEBUG_OBJECT (demux, "Seeking to first segment for on-demand stream ");
+
+    /* start playing from the first segment */
+    gst_mpd_client2_seek_to_first_segment (dashdemux->client);
+  }
+
+done:
+  if (now != NULL)
+    gst_date_time_unref (now);
+  return ret;
+}
+
+static gboolean
+gst_dash_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  gboolean ret = FALSE;
+  gchar *manifest;
+  GstMapInfo mapinfo;
+
+  if (dashdemux->client)
+    gst_mpd_client2_free (dashdemux->client);
+  dashdemux->client = gst_mpd_client2_new ();
+  gst_mpd_client2_set_download_helper (dashdemux->client,
+      demux->download_helper);
+
+  dashdemux->client->mpd_uri = g_strdup (demux->manifest_uri);
+  dashdemux->client->mpd_base_uri = g_strdup (demux->manifest_base_uri);
+
+  GST_DEBUG_OBJECT (demux, "Fetched MPD file at URI: %s (base: %s)",
+      dashdemux->client->mpd_uri,
+      GST_STR_NULL (dashdemux->client->mpd_base_uri));
+
+  if (gst_buffer_map (buf, &mapinfo, GST_MAP_READ)) {
+    manifest = (gchar *) mapinfo.data;
+    if (gst_mpd_client2_parse (dashdemux->client, manifest, mapinfo.size)) {
+      if (gst_mpd_client2_setup_media_presentation (dashdemux->client, 0, 0,
+              NULL)) {
+        ret = TRUE;
+      } else {
+        GST_ELEMENT_ERROR (demux, STREAM, DECODE,
+            ("Incompatible manifest file."), (NULL));
+      }
+    }
+    gst_buffer_unmap (buf, &mapinfo);
+  } else {
+    GST_WARNING_OBJECT (demux, "Failed to map manifest buffer");
+  }
+
+  if (ret)
+    ret = gst_dash_demux_setup_streams (demux);
+
+  return ret;
+}
+
+
+static void
+gst_dash_demux_reset (GstAdaptiveDemux * ademux)
+{
+  GstDashDemux2 *demux = GST_DASH_DEMUX_CAST (ademux);
+
+  GST_DEBUG_OBJECT (demux, "Resetting demux");
+
+  demux->end_of_period = FALSE;
+  demux->end_of_manifest = FALSE;
+
+  if (demux->client) {
+    gst_mpd_client2_free (demux->client);
+    demux->client = NULL;
+  }
+  gst_dash_demux_clock_drift_free (demux->clock_drift);
+  demux->clock_drift = NULL;
+  demux->client = gst_mpd_client2_new ();
+  gst_mpd_client2_set_download_helper (demux->client, ademux->download_helper);
+
+  demux->allow_trickmode_key_units = TRUE;
+}
+
+static GstCaps *
+gst_dash_demux_get_video_input_caps (GstDashDemux2 * demux,
+    GstActiveStream * stream)
+{
+  guint width = 0, height = 0;
+  gint fps_num = 0, fps_den = 1;
+  gboolean have_fps = FALSE;
+  GstCaps *caps = NULL;
+
+  if (stream == NULL)
+    return NULL;
+
+  /* if bitstreamSwitching is true we don't need to switch pads on resolution change */
+  if (!gst_mpd_client2_get_bitstream_switching_flag (stream)) {
+    width = gst_mpd_client2_get_video_stream_width (stream);
+    height = gst_mpd_client2_get_video_stream_height (stream);
+    have_fps =
+        gst_mpd_client2_get_video_stream_framerate (stream, &fps_num, &fps_den);
+  }
+  caps = gst_mpd_client2_get_stream_caps (stream);
+  if (caps == NULL)
+    return NULL;
+
+  if (width > 0 && height > 0) {
+    gst_caps_set_simple (caps, "width", G_TYPE_INT, width, "height",
+        G_TYPE_INT, height, NULL);
+  }
+
+  if (have_fps) {
+    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, fps_num,
+        fps_den, NULL);
+  }
+
+  return caps;
+}
+
+static GstCaps *
+gst_dash_demux_get_audio_input_caps (GstDashDemux2 * demux,
+    GstActiveStream * stream)
+{
+  guint rate = 0, channels = 0;
+  GstCaps *caps = NULL;
+
+  if (stream == NULL)
+    return NULL;
+
+  /* if bitstreamSwitching is true we don't need to switch pads on rate/channels change */
+  if (!gst_mpd_client2_get_bitstream_switching_flag (stream)) {
+    channels = gst_mpd_client2_get_audio_stream_num_channels (stream);
+    rate = gst_mpd_client2_get_audio_stream_rate (stream);
+  }
+  caps = gst_mpd_client2_get_stream_caps (stream);
+  if (caps == NULL)
+    return NULL;
+
+  if (rate > 0) {
+    gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
+  }
+  if (channels > 0) {
+    gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
+  }
+
+  return caps;
+}
+
+static GstCaps *
+gst_dash_demux_get_application_input_caps (GstDashDemux2 * demux,
+    GstActiveStream * stream)
+{
+  GstCaps *caps = NULL;
+
+  if (stream == NULL)
+    return NULL;
+
+  caps = gst_mpd_client2_get_stream_caps (stream);
+  if (caps == NULL)
+    return NULL;
+
+  return caps;
+}
+
+static GstCaps *
+gst_dash_demux_get_input_caps (GstDashDemux2 * demux, GstActiveStream * stream)
+{
+  switch (stream->mimeType) {
+    case GST_STREAM_VIDEO:
+      return gst_dash_demux_get_video_input_caps (demux, stream);
+    case GST_STREAM_AUDIO:
+      return gst_dash_demux_get_audio_input_caps (demux, stream);
+    case GST_STREAM_APPLICATION:
+      return gst_dash_demux_get_application_input_caps (demux, stream);
+    default:
+      return gst_caps_copy (GST_CAPS_NONE);
+  }
+}
+
+static void
+gst_dash_demux_stream_update_headers_info (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  gchar *path = NULL;
+
+  gst_mpd_client2_get_next_header (dashdemux->client,
+      &path, dashstream->index,
+      &stream->fragment.header_range_start, &stream->fragment.header_range_end);
+
+  if (path != NULL) {
+    stream->fragment.header_uri =
+        gst_uri_join_strings (gst_mpd_client2_get_baseURL (dashdemux->client,
+            dashstream->index), path);
+    g_free (path);
+    path = NULL;
+  }
+
+  gst_mpd_client2_get_next_header_index (dashdemux->client,
+      &path, dashstream->index,
+      &stream->fragment.index_range_start, &stream->fragment.index_range_end);
+
+  if (path != NULL) {
+    stream->fragment.index_uri =
+        gst_uri_join_strings (gst_mpd_client2_get_baseURL (dashdemux->client,
+            dashstream->index), path);
+    g_free (path);
+  }
+}
+
+static GstFlowReturn
+gst_dash_demux_stream_update_fragment_info (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  GstClockTime ts;
+  GstMediaFragmentInfo fragment;
+  gboolean isombff;
+  gboolean playing_forward =
+      (GST_ADAPTIVE_DEMUX_CAST (dashdemux)->segment.rate > 0.0);
+
+  gst_adaptive_demux2_stream_fragment_clear (&stream->fragment);
+
+  isombff = gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client);
+
+  /* Reset chunk size if any */
+  stream->fragment.chunk_size = 0;
+  dashstream->current_fragment_keyframe_distance = GST_CLOCK_TIME_NONE;
+
+  if (GST_ADAPTIVE_DEMUX2_STREAM_NEED_HEADER (stream) && isombff) {
+    gst_dash_demux_stream_update_headers_info (stream);
+    /* sidx entries may not be available yet at this point */
+    if (stream->fragment.index_uri
+        && dashstream->sidx_position != GST_CLOCK_TIME_NONE) {
+      /* request only the index to be downloaded as we need to reposition the
+       * stream to a subsegment */
+      return GST_FLOW_OK;
+    }
+  }
+
+  if (dashstream->moof_sync_samples
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+    GstDashStreamSyncSample *sync_sample =
+        &g_array_index (dashstream->moof_sync_samples, GstDashStreamSyncSample,
+        dashstream->current_sync_sample);
+
+    gst_mpd_client2_get_next_fragment (dashdemux->client, dashstream->index,
+        &fragment);
+
+    if (isombff && dashstream->sidx_position != GST_CLOCK_TIME_NONE
+        && SIDX (dashstream)->entries) {
+      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+      dashstream->current_fragment_timestamp = fragment.timestamp = entry->pts;
+      dashstream->current_fragment_duration = fragment.duration =
+          entry->duration;
+    } else {
+      dashstream->current_fragment_timestamp = fragment.timestamp;
+      dashstream->current_fragment_duration = fragment.duration;
+    }
+
+    dashstream->current_fragment_keyframe_distance =
+        fragment.duration / dashstream->moof_sync_samples->len;
+    dashstream->actual_position =
+        fragment.timestamp +
+        dashstream->current_sync_sample *
+        dashstream->current_fragment_keyframe_distance;
+    if (!playing_forward) {
+      dashstream->actual_position +=
+          dashstream->current_fragment_keyframe_distance;
+    }
+    dashstream->actual_position =
+        MIN (dashstream->actual_position,
+        fragment.timestamp + fragment.duration);
+
+    stream->fragment.uri = fragment.uri;
+    stream->fragment.stream_time = GST_CLOCK_STIME_NONE;
+    stream->fragment.duration = GST_CLOCK_TIME_NONE;
+    stream->fragment.range_start = sync_sample->start_offset;
+    stream->fragment.range_end = sync_sample->end_offset;
+
+    GST_DEBUG_OBJECT (stream,
+        "Actual position %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (dashstream->actual_position));
+
+    return GST_FLOW_OK;
+  }
+
+  if (gst_mpd_client2_get_next_fragment_timestamp (dashdemux->client,
+          dashstream->index, &ts)) {
+    if (GST_ADAPTIVE_DEMUX2_STREAM_NEED_HEADER (stream)) {
+      gst_adaptive_demux2_stream_fragment_clear (&stream->fragment);
+      gst_dash_demux_stream_update_headers_info (stream);
+    }
+
+    gst_mpd_client2_get_next_fragment (dashdemux->client, dashstream->index,
+        &fragment);
+
+    stream->fragment.uri = fragment.uri;
+    /* If the MPD does not specify indexRange (i.e., null index_uri),
+     * sidx entries may not be available until we download it */
+    if (isombff && dashstream->sidx_position != GST_CLOCK_TIME_NONE
+        && SIDX (dashstream)->entries) {
+      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+      stream->fragment.range_start =
+          dashstream->sidx_base_offset + entry->offset;
+      dashstream->actual_position = stream->fragment.stream_time = entry->pts;
+      dashstream->current_fragment_timestamp = stream->fragment.stream_time =
+          entry->pts;
+      dashstream->current_fragment_duration = stream->fragment.duration =
+          entry->duration;
+      stream->fragment.range_end =
+          stream->fragment.range_start + entry->size - 1;
+      if (!playing_forward)
+        dashstream->actual_position += entry->duration;
+    } else {
+      dashstream->actual_position = stream->fragment.stream_time =
+          fragment.timestamp;
+      dashstream->current_fragment_timestamp = fragment.timestamp;
+      dashstream->current_fragment_duration = stream->fragment.duration =
+          fragment.duration;
+      if (!playing_forward)
+        dashstream->actual_position += fragment.duration;
+      if (GST_ADAPTIVE_DEMUX2_STREAM_NEED_HEADER (stream)
+          && dashstream->sidx_base_offset != 0
+          && stream->fragment.header_uri == NULL) {
+        /* This will happen when restarting streams that keep everything in a
+         * single mp4. If we previously parsed it (non-zero sidx_base_offset),
+         * we just set the header URI to the same fragment uri, and specify the
+         * range (from 0 to the sidx base offset) */
+        GST_DEBUG_OBJECT (stream, "Handling restart");
+        stream->fragment.header_uri = g_strdup (stream->fragment.uri);
+        stream->fragment.header_range_start = 0;
+        stream->fragment.header_range_end = dashstream->sidx_base_offset;
+      }
+      stream->fragment.range_start =
+          MAX (fragment.range_start, dashstream->sidx_base_offset);
+      stream->fragment.range_end = fragment.range_end;
+    }
+
+    GST_DEBUG_OBJECT (stream,
+        "Actual position %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (dashstream->actual_position));
+
+    return GST_FLOW_OK;
+  }
+
+  return GST_FLOW_EOS;
+}
+
+static gint
+gst_dash_demux_index_entry_search (GstSidxBoxEntry * entry, GstClockTime * ts,
+    gpointer user_data)
+{
+  GstClockTime entry_ts = entry->pts + entry->duration;
+  if (entry_ts <= *ts)
+    return -1;
+  else if (entry->pts > *ts)
+    return 1;
+  else
+    return 0;
+}
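+
+/* Illustration: for sidx entries covering [0s,2s), [2s,4s) and [4s,6s), a
+ * search for ts = 3s matches the second entry, since the comparator treats
+ * an entry as a match when pts <= ts < pts + duration. */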
+
+static GstFlowReturn
+gst_dash_demux_stream_sidx_seek (GstDashDemux2Stream * dashstream,
+    gboolean forward, GstSeekFlags flags, GstClockTime ts,
+    GstClockTime * final_ts)
+{
+  GstSidxBox *sidx = SIDX (dashstream);
+  GstSidxBoxEntry *entry;
+  gint idx;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (sidx->entries_count == 0)
+    return GST_FLOW_EOS;
+
+  entry =
+      gst_util_array_binary_search (sidx->entries, sidx->entries_count,
+      sizeof (GstSidxBoxEntry),
+      (GCompareDataFunc) gst_dash_demux_index_entry_search,
+      GST_SEARCH_MODE_EXACT, &ts, NULL);
+
+  /* No exact match found, nothing in our index
+   * This is usually a bug or broken stream, as the seeking code already
+   * makes sure that we're in the correct period and segment, and only need
+   * to find the correct place inside the segment. Allow for some rounding
+   * errors and inaccuracies here though */
+  if (!entry) {
+    GstSidxBoxEntry *last_entry = &sidx->entries[sidx->entries_count - 1];
+
+    GST_WARNING_OBJECT (dashstream->parent.demux, "Couldn't find SIDX entry");
+
+    if (ts < sidx->entries[0].pts
+        && ts + 250 * GST_MSECOND >= sidx->entries[0].pts)
+      entry = &sidx->entries[0];
+    else if (ts >= last_entry->pts + last_entry->duration &&
+        ts < last_entry->pts + last_entry->duration + 250 * GST_MSECOND)
+      entry = last_entry;
+  }
+  if (!entry)
+    return GST_FLOW_EOS;
+
+  idx = entry - sidx->entries;
+
+  /* FIXME in reverse mode, if we are exactly at a fragment start it makes more
+   * sense to start from the end of the previous fragment */
+  if (!forward && idx > 0 && entry->pts == ts) {
+    idx--;
+    entry = &sidx->entries[idx];
+  }
+
+  /* Now entry->pts <= ts < entry->pts + entry->duration, need to adjust for
+   * snapping */
+  if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
+    if (idx + 1 < sidx->entries_count
+        && sidx->entries[idx + 1].pts - ts < ts - sidx->entries[idx].pts)
+      idx += 1;
+  } else if ((forward && (flags & GST_SEEK_FLAG_SNAP_AFTER)) || (!forward
+          && (flags & GST_SEEK_FLAG_SNAP_BEFORE))) {
+    if (idx + 1 < sidx->entries_count && entry->pts < ts)
+      idx += 1;
+  }
+
+  g_assert (idx < sidx->entries_count);
+
+  sidx->entry_index = idx;
+  dashstream->sidx_position = sidx->entries[idx].pts;
+
+  if (final_ts)
+    *final_ts = dashstream->sidx_position;
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_dash_demux_stream_seek (GstAdaptiveDemux2Stream * stream, gboolean forward,
+    GstSeekFlags flags, GstClockTimeDiff target_rt, GstClockTimeDiff * final_rt)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  gint last_index, last_repeat;
+  gboolean is_isobmff;
+  GstClockTime ts, final_ts;
+
+  if (target_rt < 0)
+    return GST_FLOW_ERROR;
+  ts = (GstClockTime) target_rt;
+
+  last_index = dashstream->active_stream->segment_index;
+  last_repeat = dashstream->active_stream->segment_repeat_index;
+
+  if (dashstream->adapter)
+    gst_adapter_clear (dashstream->adapter);
+  dashstream->current_offset = -1;
+  dashstream->current_index_header_or_data = 0;
+
+  dashstream->isobmff_parser.current_fourcc = 0;
+  dashstream->isobmff_parser.current_start_offset = 0;
+  dashstream->isobmff_parser.current_size = 0;
+
+  if (dashstream->moof)
+    gst_isoff_moof_box_free (dashstream->moof);
+  dashstream->moof = NULL;
+  if (dashstream->moof_sync_samples)
+    g_array_free (dashstream->moof_sync_samples, TRUE);
+  dashstream->moof_sync_samples = NULL;
+  dashstream->current_sync_sample = -1;
+  dashstream->target_time = GST_CLOCK_TIME_NONE;
+
+  is_isobmff = gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client);
+
+  if (!gst_mpd_client2_stream_seek (dashdemux->client,
+          dashstream->active_stream, forward,
+          is_isobmff ? (flags & (~(GST_SEEK_FLAG_SNAP_BEFORE |
+                      GST_SEEK_FLAG_SNAP_AFTER))) : flags, ts, &final_ts)) {
+    return GST_FLOW_EOS;
+  }
+
+  if (final_rt)
+    *final_rt = final_ts;
+
+  if (is_isobmff) {
+    GstClockTime period_start, offset;
+
+    period_start = gst_mpd_client2_get_period_start_time (dashdemux->client);
+    offset =
+        gst_mpd_client2_get_stream_presentation_offset (dashdemux->client,
+        dashstream->index);
+
+    if (G_UNLIKELY (ts < period_start))
+      ts = offset;
+    else
+      ts += offset - period_start;
+
+    if (last_index != dashstream->active_stream->segment_index ||
+        last_repeat != dashstream->active_stream->segment_repeat_index) {
+      GST_LOG_OBJECT (stream, "Segment index was changed, reset sidx parser");
+      gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+      dashstream->sidx_base_offset = 0;
+      dashstream->allow_sidx = TRUE;
+    }
+
+    if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+      if (gst_dash_demux_stream_sidx_seek (dashstream, forward, flags, ts,
+              &final_ts) != GST_FLOW_OK) {
+        GST_ERROR_OBJECT (stream, "Couldn't find position in sidx");
+        dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+        gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+      }
+      if (final_rt)
+        *final_rt = final_ts;
+      dashstream->pending_seek_ts = GST_CLOCK_TIME_NONE;
+    } else {
+      /* no index yet, seek when we have it */
+      /* FIXME - the final_ts won't be correct here */
+      dashstream->pending_seek_ts = ts;
+    }
+  }
+
+  stream->discont = TRUE;
+
+  return GST_FLOW_OK;
+}
+
+static gboolean
+gst_dash_demux_stream_has_next_sync_sample (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (stream->demux);
+
+  if (dashstream->moof_sync_samples &&
+      GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)) {
+    gboolean playing_forward = (demux->segment.rate > 0.0);
+    if (playing_forward) {
+      if (dashstream->current_sync_sample + 1 <
+          dashstream->moof_sync_samples->len)
+        return TRUE;
+    } else {
+      if (dashstream->current_sync_sample >= 1)
+        return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+static gboolean
+gst_dash_demux_stream_has_next_subfragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstSidxBox *sidx = SIDX (dashstream);
+
+  if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+    gboolean playing_forward = (stream->demux->segment.rate > 0.0);
+    if (playing_forward) {
+      if (sidx->entry_index + 1 < sidx->entries_count)
+        return TRUE;
+    } else {
+      if (sidx->entry_index >= 1)
+        return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+static gboolean
+gst_dash_demux_stream_advance_sync_sample (GstAdaptiveDemux2Stream * stream,
+    GstClockTime target_time)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstAdaptiveDemux *demux = stream->demux;
+  gboolean playing_forward = (demux->segment.rate > 0.0);
+  gboolean fragment_finished = FALSE;
+  guint idx = -1;
+
+  if (GST_CLOCK_TIME_IS_VALID (target_time)) {
+
+    GST_LOG_OBJECT (stream,
+        "target_time:%" GST_TIME_FORMAT " fragment ts %" GST_TIME_FORMAT
+        " average keyframe dist: %" GST_TIME_FORMAT
+        " current keyframe dist: %" GST_TIME_FORMAT
+        " fragment duration:%" GST_TIME_FORMAT,
+        GST_TIME_ARGS (target_time),
+        GST_TIME_ARGS (dashstream->current_fragment_timestamp),
+        GST_TIME_ARGS (dashstream->keyframe_average_distance),
+        GST_TIME_ARGS (dashstream->current_fragment_keyframe_distance),
+        GST_TIME_ARGS (stream->fragment.duration));
+
+    if (playing_forward) {
+      idx =
+          (target_time -
+          dashstream->current_fragment_timestamp) /
+          dashstream->current_fragment_keyframe_distance;
+
+      /* Prevent getting stuck in a loop due to rounding errors */
+      if (idx == dashstream->current_sync_sample)
+        idx++;
+    } else {
+      GstClockTime end_time =
+          dashstream->current_fragment_timestamp +
+          dashstream->current_fragment_duration;
+
+      if (end_time < target_time) {
+        idx = dashstream->moof_sync_samples->len;
+      } else {
+        idx =
+            (end_time -
+            target_time) / dashstream->current_fragment_keyframe_distance;
+        if (idx == dashstream->moof_sync_samples->len) {
+          dashstream->current_sync_sample = -1;
+          fragment_finished = TRUE;
+          goto beach;
+        }
+        idx = dashstream->moof_sync_samples->len - 1 - idx;
+      }
+
+      /* Prevent getting stuck in a loop due to rounding errors */
+      if (idx == dashstream->current_sync_sample) {
+        if (idx == 0) {
+          dashstream->current_sync_sample = -1;
+          fragment_finished = TRUE;
+          goto beach;
+        }
+
+        idx--;
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (stream,
+      "Advancing sync sample #%d target #%d",
+      dashstream->current_sync_sample, idx);
+
+  if (idx != -1 && idx >= dashstream->moof_sync_samples->len) {
+    dashstream->current_sync_sample = -1;
+    fragment_finished = TRUE;
+    goto beach;
+  }
+
+  if (playing_forward) {
+    /* Try to get the sync sample for the target time */
+    if (idx != -1) {
+      dashstream->current_sync_sample = idx;
+    } else {
+      dashstream->current_sync_sample++;
+      if (dashstream->current_sync_sample >= dashstream->moof_sync_samples->len) {
+        fragment_finished = TRUE;
+      }
+    }
+  } else {
+    if (idx != -1) {
+      dashstream->current_sync_sample = idx;
+    } else if (dashstream->current_sync_sample == -1) {
+      dashstream->current_sync_sample = dashstream->moof_sync_samples->len - 1;
+    } else if (dashstream->current_sync_sample == 0) {
+      dashstream->current_sync_sample = -1;
+      fragment_finished = TRUE;
+    } else {
+      dashstream->current_sync_sample--;
+    }
+  }
+
+beach:
+  GST_DEBUG_OBJECT (stream,
+      "Advancing sync sample #%d fragment_finished:%d",
+      dashstream->current_sync_sample, fragment_finished);
+
+  if (!fragment_finished)
+    stream->discont = TRUE;
+
+  return !fragment_finished;
+}
+
+static gboolean
+gst_dash_demux_stream_advance_subfragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+
+  GstSidxBox *sidx = SIDX (dashstream);
+  gboolean fragment_finished = TRUE;
+
+  if (dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+    gboolean playing_forward = (stream->demux->segment.rate > 0.0);
+    if (playing_forward) {
+      gint idx = ++sidx->entry_index;
+      if (idx < sidx->entries_count) {
+        fragment_finished = FALSE;
+      }
+
+      if (idx == sidx->entries_count)
+        dashstream->sidx_position =
+            sidx->entries[idx - 1].pts + sidx->entries[idx - 1].duration;
+      else
+        dashstream->sidx_position = sidx->entries[idx].pts;
+    } else {
+      gint idx = --sidx->entry_index;
+
+      if (idx >= 0) {
+        fragment_finished = FALSE;
+        dashstream->sidx_position = sidx->entries[idx].pts;
+      } else {
+        dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (stream, "New sidx index: %d / %d. "
+      "Finished fragment: %d", sidx->entry_index, sidx->entries_count,
+      fragment_finished);
+
+  return !fragment_finished;
+}
+
+static gboolean
+gst_dash_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (dashdemux);
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  gboolean playing_forward = (demux->segment.rate > 0.0);
+
+  if (dashstream->moof_sync_samples &&
+      GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+    if (gst_dash_demux_stream_has_next_sync_sample (stream))
+      return TRUE;
+  }
+
+  if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client)) {
+    if (gst_dash_demux_stream_has_next_subfragment (stream))
+      return TRUE;
+  }
+
+  return gst_mpd_client2_has_next_segment (dashdemux->client,
+      dashstream->active_stream, playing_forward);
+}
+
+/* The goal here is to figure out, once we have pushed a keyframe downstream,
+ * what the next ideal keyframe to download is.
+ *
+ * This is done based on:
+ * * the current internal position (i.e. actual_position)
+ * * the reported downstream position (QoS feedback)
+ * * the average keyframe download time (average_download_time)
+ */
+static GstClockTime
+gst_dash_demux_stream_get_target_time (GstDashDemux2 * dashdemux,
+    GstAdaptiveDemux2Stream * stream, GstClockTime cur_position,
+    GstClockTime min_skip)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (dashdemux);
+  GstClockTime cur_running, min_running, min_position;
+  GstClockTimeDiff diff;
+  GstClockTime ret = cur_position;
+  GstClockTime deadline;
+  GstClockTime upstream_earliest_time;
+  GstClockTime earliest_time = GST_CLOCK_TIME_NONE;
+  gdouble play_rate = gst_adaptive_demux_play_rate (stream->demux);
+  GstClockTime period_start = gst_dash_demux_get_period_start_time (demux);
+  GstClockTime pts_offset =
+      gst_dash_demux_get_presentation_offset (demux, stream);
+
+  g_assert (min_skip > 0);
+
+  /* minimum stream position we have to skip to */
+  if (play_rate > 0.0)
+    min_position = cur_position + min_skip;
+  else if (cur_position < min_skip)
+    min_position = 0;
+  else
+    min_position = cur_position - min_skip;
+
+  /* Move from the internal time to the demux segment, so we can
+   * convert to running time and back */
+  cur_position += (period_start - pts_offset);
+
+  /* Use current clock time or the QoS earliest time, whichever is further in
+   * the future. The QoS time is only updated on every QoS event and
+   * especially not if e.g. a videodecoder or converter drops a frame further
+   * downstream.
+   *
+   * We only use the times if we ever received a QoS event since the last
+   * flush, as otherwise base_time and clock might not be correct because of a
+   * still pre-rolling sink
+   */
+  upstream_earliest_time =
+      gst_adaptive_demux2_get_qos_earliest_time ((GstAdaptiveDemux *)
+      dashdemux);
+  if (upstream_earliest_time != GST_CLOCK_TIME_NONE) {
+    GstClock *clock;
+
+    clock = gst_element_get_clock (GST_ELEMENT_CAST (dashdemux));
+
+    if (clock) {
+      GstClockTime base_time;
+      GstClockTime now_time;
+
+      base_time = gst_element_get_base_time (GST_ELEMENT_CAST (dashdemux));
+      now_time = gst_clock_get_time (clock);
+      if (now_time > base_time)
+        now_time -= base_time;
+      else
+        now_time = 0;
+
+      gst_object_unref (clock);
+
+      earliest_time = MAX (now_time, upstream_earliest_time);
+    } else {
+      earliest_time = upstream_earliest_time;
+    }
+  }
+
+  /* our current position in running time */
+  cur_running =
+      gst_segment_to_running_time (&demux->segment, GST_FORMAT_TIME,
+      cur_position);
+
+  /* the minimum position we have to skip to in running time */
+  min_running =
+      gst_segment_to_running_time (&demux->segment, GST_FORMAT_TIME,
+      min_position);
+
+  GST_DEBUG_OBJECT (stream,
+      "position: current %" GST_TIME_FORMAT " min next %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (cur_position), GST_TIME_ARGS (min_position));
+  GST_DEBUG_OBJECT (stream,
+      "running time: current %" GST_TIME_FORMAT " min next %" GST_TIME_FORMAT
+      " earliest %" GST_TIME_FORMAT, GST_TIME_ARGS (cur_running),
+      GST_TIME_ARGS (min_running), GST_TIME_ARGS (earliest_time));
+
+  /* Take configured maximum video bandwidth and framerate into account */
+  {
+    GstClockTime min_run_dist, min_frame_dist, diff = 0;
+    guint max_fps_n, max_fps_d;
+
+    min_run_dist = min_skip / ABS (play_rate);
+
+    if (dashdemux->max_video_framerate_n != 0) {
+      max_fps_n = dashdemux->max_video_framerate_n;
+      max_fps_d = dashdemux->max_video_framerate_d;
+    } else {
+      /* more than 10 fps is not very useful if we're skipping anyway */
+      max_fps_n = 10;
+      max_fps_d = 1;
+    }
+
+    min_frame_dist = gst_util_uint64_scale_ceil (GST_SECOND,
+        max_fps_d, max_fps_n);
+
+    GST_DEBUG_OBJECT (stream,
+        "Have max framerate %d/%d - Min dist %" GST_TIME_FORMAT
+        ", min requested dist %" GST_TIME_FORMAT,
+        max_fps_n, max_fps_d,
+        GST_TIME_ARGS (min_run_dist), GST_TIME_ARGS (min_frame_dist));
+    if (min_frame_dist > min_run_dist)
+      diff = MAX (diff, min_frame_dist - min_run_dist);
+
+    if (demux->max_bitrate != 0) {
+      guint64 max_bitrate = gst_util_uint64_scale_ceil (GST_SECOND,
+          8 * dashstream->keyframe_average_size,
+          dashstream->keyframe_average_distance) * ABS (play_rate);
+
+      if (max_bitrate > demux->max_bitrate) {
+        min_frame_dist = gst_util_uint64_scale_ceil (GST_SECOND,
+            8 * dashstream->keyframe_average_size,
+            demux->max_bitrate) * ABS (play_rate);
+
+        GST_DEBUG_OBJECT (stream,
+            "Have max bitrate %u - Min dist %" GST_TIME_FORMAT
+            ", min requested dist %" GST_TIME_FORMAT, demux->max_bitrate,
+            GST_TIME_ARGS (min_run_dist), GST_TIME_ARGS (min_frame_dist));
+        if (min_frame_dist > min_run_dist)
+          diff = MAX (diff, min_frame_dist - min_run_dist);
+      }
+    }
+
+    if (diff > 0) {
+      GST_DEBUG_OBJECT (stream,
+          "Skipping further ahead by %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+      min_running += diff;
+    }
+  }
+
+  if (earliest_time == GST_CLOCK_TIME_NONE) {
+    GstClockTime run_key_dist;
+
+    run_key_dist = dashstream->keyframe_average_distance / ABS (play_rate);
+
+    /* If we don't have downstream information (such as at startup or
+     * without live sinks), just get the next time by taking the minimum
+     * amount we have to skip ahead, unless downloading takes longer than
+     * that */
+    if (run_key_dist > dashstream->average_download_time)
+      ret =
+          gst_segment_position_from_running_time (&demux->segment,
+          GST_FORMAT_TIME, min_running);
+    else
+      ret = gst_segment_position_from_running_time (&demux->segment,
+          GST_FORMAT_TIME,
+          min_running - run_key_dist + dashstream->average_download_time);
+
+    GST_DEBUG_OBJECT (stream,
+        "Advancing to %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+        GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+
+    goto out;
+  }
+
+  /* Figure out the difference, in running time, between where we are and
+   * where downstream is */
+  diff = min_running - earliest_time;
+  GST_LOG_OBJECT (stream,
+      "min_running %" GST_TIME_FORMAT " diff %" GST_STIME_FORMAT
+      " average_download %" GST_TIME_FORMAT, GST_TIME_ARGS (min_running),
+      GST_STIME_ARGS (diff), GST_TIME_ARGS (dashstream->average_download_time));
+
+  /* Have at least 500ms or 3 keyframes safety between current position and downstream */
+  deadline = MAX (500 * GST_MSECOND, 3 * dashstream->average_download_time);
+
+  /* The further away we are from the current position, the less we need to advance */
+  if (diff < 0 || diff < deadline) {
+    /* Force skipping (but not more than 1s ahead) */
+    ret =
+        gst_segment_position_from_running_time (&demux->segment,
+        GST_FORMAT_TIME, earliest_time + MIN (deadline, GST_SECOND));
+    GST_DEBUG_OBJECT (stream,
+        "MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+        GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+  } else if (diff < 4 * dashstream->average_download_time) {
+    /* Go forward a bit less aggressively (and at most 1s forward) */
+    ret = gst_segment_position_from_running_time (&demux->segment,
+        GST_FORMAT_TIME, min_running + MIN (GST_SECOND,
+            2 * dashstream->average_download_time));
+    GST_DEBUG_OBJECT (stream,
+        "MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+        GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+  } else {
+    /* Get the next position satisfying the download time */
+    ret = gst_segment_position_from_running_time (&demux->segment,
+        GST_FORMAT_TIME, min_running);
+    GST_DEBUG_OBJECT (stream,
+        "Advance to %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
+        GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
+  }
+
+out:
+
+  /* Convert the return time back to an internal (stream) timestamp */
+  if (ret != GST_CLOCK_TIME_NONE) {
+    ret -= (period_start - pts_offset);
+  }
+
+  {
+    GstClockTime cur_skip =
+        (cur_position < ret) ? ret - cur_position : cur_position - ret;
+
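+    /* Keep a running average of the skip distance, weighting the previous
+     * estimate 3:1 against the new sample */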
+    if (dashstream->average_skip_size == 0) {
+      dashstream->average_skip_size = cur_skip;
+    } else {
+      dashstream->average_skip_size =
+          (cur_skip + 3 * dashstream->average_skip_size) / 4;
+    }
+
+    if (dashstream->average_skip_size >
+        cur_skip + dashstream->keyframe_average_distance
+        && dashstream->average_skip_size > min_skip) {
+      if (play_rate > 0)
+        ret = cur_position + dashstream->average_skip_size;
+      else if (cur_position > dashstream->average_skip_size)
+        ret = cur_position - dashstream->average_skip_size;
+      else
+        ret = 0;
+    }
+  }
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_dash_demux_stream_advance_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  GstClockTime target_time = GST_CLOCK_TIME_NONE;
+  GstClockTime previous_position;
+  gboolean playing_forward =
+      (GST_ADAPTIVE_DEMUX_CAST (dashdemux)->segment.rate > 0.0);
+  GstFlowReturn ret;
+
+  GST_DEBUG_OBJECT (stream, "Advance fragment");
+
+  /* Update download statistics */
+  if (dashstream->moof_sync_samples &&
+      GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux) &&
+      GST_CLOCK_TIME_IS_VALID (stream->last_download_time)) {
+    if (GST_CLOCK_TIME_IS_VALID (dashstream->average_download_time)) {
+      dashstream->average_download_time =
+          (3 * dashstream->average_download_time +
+          stream->last_download_time) / 4;
+    } else {
+      dashstream->average_download_time = stream->last_download_time;
+    }
+
+    GST_DEBUG_OBJECT (stream,
+        "Download time last: %" GST_TIME_FORMAT " average: %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (stream->last_download_time),
+        GST_TIME_ARGS (dashstream->average_download_time));
+  }
+
+  previous_position = dashstream->actual_position;
+
+  /* Update internal position */
+  if (GST_CLOCK_TIME_IS_VALID (dashstream->actual_position)) {
+    GstClockTime dur;
+    if (dashstream->moof_sync_samples
+        && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+      GST_LOG_OBJECT (stream, "current sync sample #%d",
+          dashstream->current_sync_sample);
+      if (dashstream->current_sync_sample == -1) {
+        dur = 0;
+      } else if (dashstream->current_sync_sample <
+          dashstream->moof_sync_samples->len) {
+        dur = dashstream->current_fragment_keyframe_distance;
+      } else {
+        if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client) &&
+            dashstream->sidx_position != GST_CLOCK_TIME_NONE
+            && SIDX (dashstream)->entries) {
+          GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+          dur = entry->duration;
+        } else {
+          dur =
+              dashstream->current_fragment_timestamp +
+              dashstream->current_fragment_duration -
+              dashstream->actual_position;
+        }
+      }
+    } else if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client) &&
+        dashstream->sidx_position != GST_CLOCK_TIME_NONE
+        && SIDX (dashstream)->entries) {
+      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dashstream);
+      dur = entry->duration;
+    } else {
+      dur = stream->fragment.duration;
+    }
+
+    if (dashstream->moof_sync_samples
+        && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+      /* We just downloaded the header, we actually use the previous
+       * target_time now as it was not used up yet */
+      if (dashstream->current_sync_sample == -1)
+        target_time = dashstream->target_time;
+      else
+        target_time =
+            gst_dash_demux_stream_get_target_time (dashdemux, stream,
+            dashstream->actual_position, dur);
+      dashstream->actual_position = target_time;
+    } else {
+      /* Adjust based on direction */
+      if (playing_forward)
+        dashstream->actual_position += dur;
+      else if (dashstream->actual_position >= dur)
+        dashstream->actual_position -= dur;
+      else
+        dashstream->actual_position = 0;
+    }
+
+    GST_DEBUG_OBJECT (stream,
+        "Actual position %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (dashstream->actual_position));
+  }
+  dashstream->target_time = target_time;
+
+  GST_DEBUG_OBJECT (stream, "target_time: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (target_time));
+
+  /* If downloading only keyframes, switch to the next one or fall through */
+  if (dashstream->moof_sync_samples &&
+      GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux)) {
+    if (gst_dash_demux_stream_advance_sync_sample (stream, target_time))
+      return GST_FLOW_OK;
+  }
+
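+  /* Reset ISOBMFF box parsing state and per-fragment moof data */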
+  dashstream->isobmff_parser.current_fourcc = 0;
+  dashstream->isobmff_parser.current_start_offset = 0;
+  dashstream->isobmff_parser.current_size = 0;
+
+  if (dashstream->moof)
+    gst_isoff_moof_box_free (dashstream->moof);
+  dashstream->moof = NULL;
+  if (dashstream->moof_sync_samples)
+    g_array_free (dashstream->moof_sync_samples, TRUE);
+  dashstream->moof_sync_samples = NULL;
+  dashstream->current_sync_sample = -1;
+
+  /* Check if we just need to 'advance' to the next fragment, or if we
+   * need to skip by more. */
+  if (GST_CLOCK_TIME_IS_VALID (target_time)
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux) &&
+      dashstream->active_stream->mimeType == GST_STREAM_VIDEO) {
+    GstClockTime actual_ts;
+    GstClockTimeDiff actual_rt;
+    GstSeekFlags flags = 0;
+
+    /* Key-unit trick mode, seek to fragment containing target time
+     *
+     * We first try seeking without snapping. As the code above to skip
+     * keyframes in the current fragment was not successful, we should go at
+     * least one fragment ahead. Due to rounding errors we could end up at
+     * the same fragment again here, in which case we retry seeking with the
+     * SNAP_AFTER flag.
+     *
+     * We don't set that flag unconditionally, as that would always land us
+     * one fragment further than necessary in the normal case.
+     */
+    while (TRUE) {
+      ret =
+          gst_dash_demux_stream_seek (stream, playing_forward, flags,
+          target_time, &actual_rt);
+
+      if (ret != GST_FLOW_OK) {
+        GST_WARNING_OBJECT (stream,
+            "Failed to seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (target_time));
+        /* Give up */
+        if (flags != 0)
+          break;
+
+        /* Retry with skipping ahead */
+        flags |= GST_SEEK_FLAG_SNAP_AFTER;
+        continue;
+      }
+      actual_ts = actual_rt;
+
+      GST_DEBUG_OBJECT (stream,
+          "Skipped to %" GST_TIME_FORMAT " (wanted %" GST_TIME_FORMAT ", was %"
+          GST_TIME_FORMAT ")", GST_TIME_ARGS (actual_ts),
+          GST_TIME_ARGS (target_time), GST_TIME_ARGS (previous_position));
+
+      if ((playing_forward && actual_ts <= previous_position) ||
+          (!playing_forward && actual_ts >= previous_position)) {
+        /* Give up */
+        if (flags != 0)
+          break;
+
+        /* Retry with forcing skipping ahead */
+        flags |= GST_SEEK_FLAG_SNAP_AFTER;
+
+        continue;
+      }
+
+      /* All good */
+      break;
+    }
+  } else {
+    /* Normal mode, advance to the next fragment */
+    if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client)) {
+      if (gst_dash_demux_stream_advance_subfragment (stream))
+        return GST_FLOW_OK;
+    }
+
+    if (dashstream->adapter)
+      gst_adapter_clear (dashstream->adapter);
+
+    gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+    dashstream->sidx_base_offset = 0;
+    dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+    dashstream->allow_sidx = TRUE;
+
+    ret = gst_mpd_client2_advance_segment (dashdemux->client,
+        dashstream->active_stream, playing_forward);
+  }
+  return ret;
+}
+
+static gboolean
+gst_dash_demux_stream_select_bitrate (GstAdaptiveDemux2Stream * stream,
+    guint64 bitrate)
+{
+  GstActiveStream *active_stream = NULL;
+  GList *rep_list = NULL;
+  gint new_index;
+  GstAdaptiveDemux *base_demux = stream->demux;
+  GstDashDemux2 *demux = GST_DASH_DEMUX_CAST (stream->demux);
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  gboolean ret = FALSE;
+  gdouble play_rate = gst_adaptive_demux_play_rate (base_demux);
+
+  active_stream = dashstream->active_stream;
+  if (active_stream == NULL) {
+    goto end;
+  }
+
+  /* In key-frame trick mode don't change bitrates */
+  if (GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)) {
+    GST_DEBUG_OBJECT (demux, "In key-frame trick mode, not changing bitrates");
+    goto end;
+  }
+
+  /* retrieve representation list */
+  if (active_stream->cur_adapt_set)
+    rep_list = active_stream->cur_adapt_set->Representations;
+  if (!rep_list) {
+    goto end;
+  }
+
+  GST_DEBUG_OBJECT (stream,
+      "Trying to change to bitrate: %" G_GUINT64_FORMAT, bitrate);
+
+  /* get representation index with current max_bandwidth */
+  if (GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (base_demux) ||
+      ABS (play_rate) <= 1.0) {
+    new_index =
+        gst_mpd_client2_get_rep_idx_with_max_bandwidth (rep_list, bitrate,
+        demux->max_video_width, demux->max_video_height,
+        demux->max_video_framerate_n, demux->max_video_framerate_d);
+  } else {
+    new_index =
+        gst_mpd_client2_get_rep_idx_with_max_bandwidth (rep_list,
+        bitrate / ABS (play_rate), demux->max_video_width,
+        demux->max_video_height, demux->max_video_framerate_n,
+        demux->max_video_framerate_d);
+  }
+
+  /* if no representation has the required bandwidth, take the lowest one */
+  if (new_index == -1)
+    new_index = gst_mpd_client2_get_rep_idx_with_min_bandwidth (rep_list);
+
+  if (new_index != active_stream->representation_idx) {
+    GstMPDRepresentationNode *rep = g_list_nth_data (rep_list, new_index);
+    GST_INFO_OBJECT (demux, "Changing representation idx: %d %d %u",
+        dashstream->index, new_index, rep->bandwidth);
+    if (gst_mpd_client2_setup_representation (demux->client, active_stream,
+            rep)) {
+      GstCaps *caps;
+
+      GST_INFO_OBJECT (demux, "Switching bitrate to %d",
+          active_stream->cur_representation->bandwidth);
+      caps = gst_dash_demux_get_input_caps (demux, active_stream);
+      gst_adaptive_demux2_stream_set_caps (stream, caps);
+      ret = TRUE;
+
+    } else {
+      GST_WARNING_OBJECT (demux, "Can not switch representation, aborting...");
+    }
+  }
+
+  if (ret) {
+    if (gst_mpd_client2_has_isoff_ondemand_profile (demux->client)
+        && SIDX (dashstream)->entries) {
+      /* store our current position to change to the same one in a different
+       * representation if needed */
+      if (SIDX (dashstream)->entry_index < SIDX (dashstream)->entries_count)
+        dashstream->sidx_position = SIDX_CURRENT_ENTRY (dashstream)->pts;
+      else if (SIDX (dashstream)->entry_index >=
+          SIDX (dashstream)->entries_count)
+        dashstream->sidx_position =
+            SIDX_ENTRY (dashstream,
+            SIDX (dashstream)->entries_count - 1)->pts + SIDX_ENTRY (dashstream,
+            SIDX (dashstream)->entries_count - 1)->duration;
+      else
+        dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+    } else {
+      dashstream->sidx_position = GST_CLOCK_TIME_NONE;
+    }
+
+    gst_isoff_sidx_parser_clear (&dashstream->sidx_parser);
+    dashstream->sidx_base_offset = 0;
+    dashstream->allow_sidx = TRUE;
+
+    /* Reset ISOBMFF box parsing state */
+    dashstream->isobmff_parser.current_fourcc = 0;
+    dashstream->isobmff_parser.current_start_offset = 0;
+    dashstream->isobmff_parser.current_size = 0;
+
+    dashstream->current_offset = -1;
+    dashstream->current_index_header_or_data = 0;
+
+    if (dashstream->adapter)
+      gst_adapter_clear (dashstream->adapter);
+
+    if (dashstream->moof)
+      gst_isoff_moof_box_free (dashstream->moof);
+    dashstream->moof = NULL;
+    if (dashstream->moof_sync_samples)
+      g_array_free (dashstream->moof_sync_samples, TRUE);
+    dashstream->moof_sync_samples = NULL;
+    dashstream->current_sync_sample = -1;
+    dashstream->target_time = GST_CLOCK_TIME_NONE;
+  }
+
+end:
+  return ret;
+}
+
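+/* A seek only changes the playback position if it updates the start (for
+ * forward rates) or the stop (for reverse rates) */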
+#define SEEK_UPDATES_PLAY_POSITION(r, start_type, stop_type) \
+  ((r >= 0 && start_type != GST_SEEK_TYPE_NONE) || \
+   (r < 0 && stop_type != GST_SEEK_TYPE_NONE))
+
+static gboolean
+gst_dash_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek)
+{
+  gdouble rate;
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType start_type, stop_type;
+  gint64 start, stop;
+  GList *list;
+  GstClockTime current_pos, target_pos;
+  guint current_period;
+  GstStreamPeriod *period;
+  GList *iter;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+  gst_event_parse_seek (seek, &rate, &format, &flags, &start_type, &start,
+      &stop_type, &stop);
+
+  if (!SEEK_UPDATES_PLAY_POSITION (rate, start_type, stop_type)) {
+    /* nothing to do if we don't have to update the current position */
+    return TRUE;
+  }
+
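+  /* Playing forward, the new position is the requested start; playing
+   * backwards, it is the requested stop */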
+  if (rate > 0.0) {
+    target_pos = (GstClockTime) start;
+  } else {
+    target_pos = (GstClockTime) stop;
+  }
+
+  /* select the requested Period in the Media Presentation */
+  if (!gst_mpd_client2_setup_media_presentation (dashdemux->client, target_pos,
+          -1, NULL))
+    return FALSE;
+
+  current_period = 0;
+  for (list = g_list_first (dashdemux->client->periods); list;
+      list = g_list_next (list)) {
+    period = list->data;
+    current_pos = period->start;
+    current_period = period->number;
+    GST_DEBUG_OBJECT (demux, "Looking at period %u) start:%"
+        GST_TIME_FORMAT " - duration:%"
+        GST_TIME_FORMAT ") for position %" GST_TIME_FORMAT,
+        current_period, GST_TIME_ARGS (current_pos),
+        GST_TIME_ARGS (period->duration), GST_TIME_ARGS (target_pos));
+    if (current_pos <= target_pos
+        && target_pos <= current_pos + period->duration) {
+      break;
+    }
+  }
+  if (list == NULL) {
+    GST_WARNING_OBJECT (demux, "Could not find the Period to seek to");
+    return FALSE;
+  }
+
+  if (current_period != gst_mpd_client2_get_period_index (dashdemux->client)) {
+    GST_DEBUG_OBJECT (demux, "Seeking to Period %d", current_period);
+
+    /* clean old active stream list, if any */
+    gst_mpd_client2_active_streams_free (dashdemux->client);
+
+    /* setup video, audio and subtitle streams, starting from the new Period */
+    if (!gst_mpd_client2_set_period_index (dashdemux->client, current_period)
+        || !gst_dash_demux_setup_all_streams (dashdemux))
+      return FALSE;
+  }
+
+  /* Update the current sequence on all streams */
+  for (iter = demux->input_period->streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+    GstDashDemux2Stream *dashstream = iter->data;
+
+    dashstream->average_skip_size = 0;
+    if (gst_dash_demux_stream_seek (stream, rate >= 0, 0, target_pos,
+            NULL) != GST_FLOW_OK)
+      return FALSE;
+  }
+
+  return TRUE;
+}
+
+static gint64
+gst_dash_demux_get_manifest_update_interval (GstAdaptiveDemux * demux)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
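+  /* Refresh at the MPD's minimumUpdatePeriod, but never wait longer than
+   * SLOW_CLOCK_UPDATE_INTERVAL */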
+  return MIN (dashdemux->client->mpd_root_node->minimumUpdatePeriod * 1000,
+      SLOW_CLOCK_UPDATE_INTERVAL);
+}
+
+static GstFlowReturn
+gst_dash_demux_update_manifest_data (GstAdaptiveDemux * demux,
+    GstBuffer * buffer)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  GstMPDClient2 *new_client = NULL;
+  GstMapInfo mapinfo;
+
+  GST_DEBUG_OBJECT (demux, "Updating manifest file from URL");
+
+  /* parse the manifest file */
+  new_client = gst_mpd_client2_new ();
+  gst_mpd_client2_set_download_helper (new_client, demux->download_helper);
+  new_client->mpd_uri = g_strdup (demux->manifest_uri);
+  new_client->mpd_base_uri = g_strdup (demux->manifest_base_uri);
+  gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
+
+  if (gst_mpd_client2_parse (new_client, (gchar *) mapinfo.data, mapinfo.size)) {
+    const gchar *period_id;
+    guint period_idx;
+    GList *iter;
+    GList *streams_iter;
+
+    /* prepare the new manifest and try to transfer the stream position
+     * status from the old manifest client  */
+
+    GST_DEBUG_OBJECT (demux, "Updating manifest");
+
+    period_id = gst_mpd_client2_get_period_id (dashdemux->client);
+    period_idx = gst_mpd_client2_get_period_index (dashdemux->client);
+
+    /* setup video, audio and subtitle streams, starting from current Period */
+    if (!gst_mpd_client2_setup_media_presentation (new_client, -1,
+            (period_id ? -1 : period_idx), period_id)) {
+      /* TODO */
+    }
+
+    if (period_id) {
+      if (!gst_mpd_client2_set_period_id (new_client, period_id)) {
+        GST_DEBUG_OBJECT (demux, "Error setting up the updated manifest file");
+        gst_mpd_client2_free (new_client);
+        gst_buffer_unmap (buffer, &mapinfo);
+        return GST_FLOW_EOS;
+      }
+    } else {
+      if (!gst_mpd_client2_set_period_index (new_client, period_idx)) {
+        GST_DEBUG_OBJECT (demux, "Error setting up the updated manifest file");
+        gst_mpd_client2_free (new_client);
+        gst_buffer_unmap (buffer, &mapinfo);
+        return GST_FLOW_EOS;
+      }
+    }
+
+    if (!gst_dash_demux_setup_mpdparser_streams (dashdemux, new_client)) {
+      GST_ERROR_OBJECT (demux, "Failed to setup streams on manifest " "update");
+      gst_mpd_client2_free (new_client);
+      gst_buffer_unmap (buffer, &mapinfo);
+      return GST_FLOW_ERROR;
+    }
+
+    /* update the streams to play from the next segment */
+    for (iter = demux->input_period->streams, streams_iter =
+        new_client->active_streams; iter && streams_iter;
+        iter = g_list_next (iter), streams_iter = g_list_next (streams_iter)) {
+      GstDashDemux2Stream *demux_stream = iter->data;
+      GstActiveStream *new_stream = streams_iter->data;
+      GstClockTime ts;
+
+      if (!new_stream) {
+        GST_DEBUG_OBJECT (demux,
+            "Stream of index %d is missing from manifest update",
+            demux_stream->index);
+        gst_mpd_client2_free (new_client);
+        gst_buffer_unmap (buffer, &mapinfo);
+        return GST_FLOW_EOS;
+      }
+
+      if (gst_mpd_client2_get_next_fragment_timestamp (dashdemux->client,
+              demux_stream->index, &ts)
+          || gst_mpd_client2_get_last_fragment_timestamp_end (dashdemux->client,
+              demux_stream->index, &ts)) {
+
+        /* Due to rounding when doing the timescale conversions it might happen
+         * that the ts falls back to a previous segment, leading the same data
+         * to be downloaded twice. We try to work around this by always adding
+         * 10 microseconds to get back to the correct segment. The errors are
+         * usually on the order of nanoseconds so it should be enough.
+         */
+
+        /* _get_next_fragment_timestamp() returned relative timestamp to
+         * corresponding period start, but _client_stream_seek expects absolute
+         * MPD time. */
+        ts += gst_mpd_client2_get_period_start_time (dashdemux->client);
+
+        GST_DEBUG_OBJECT (demux,
+            "Current position: %" GST_TIME_FORMAT ", updating to %"
+            GST_TIME_FORMAT, GST_TIME_ARGS (ts),
+            GST_TIME_ARGS (ts + (10 * GST_USECOND)));
+        ts += 10 * GST_USECOND;
+        gst_mpd_client2_stream_seek (new_client, new_stream,
+            demux->segment.rate >= 0, 0, ts, NULL);
+      }
+
+      demux_stream->active_stream = new_stream;
+    }
+
+    gst_mpd_client2_free (dashdemux->client);
+    dashdemux->client = new_client;
+
+    GST_DEBUG_OBJECT (demux, "Manifest file successfully updated");
+    if (dashdemux->clock_drift) {
+      gst_dash_demux_poll_clock_drift (dashdemux);
+    }
+  } else {
+    /* This usually happens when a wrong URL was set on the source element
+     * and we received a 404 HTML error page instead of the manifest */
+    GST_WARNING_OBJECT (demux, "Error parsing the manifest.");
+    gst_mpd_client2_free (new_client);
+    gst_buffer_unmap (buffer, &mapinfo);
+    return GST_FLOW_ERROR;
+  }
+
+  gst_buffer_unmap (buffer, &mapinfo);
+
+  return GST_FLOW_OK;
+}
+
+static GstClockTime
+gst_dash_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux2Stream *
+    stream)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (stream->demux);
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  GstDateTime *segmentAvailability;
+  GstActiveStream *active_stream = dashstream->active_stream;
+
+  segmentAvailability =
+      gst_mpd_client2_get_next_segment_availability_start_time
+      (dashdemux->client, active_stream);
+
+  if (segmentAvailability) {
+    GstClockTimeDiff diff;
+    GstClockTimeDiff clock_compensation;
+    GstDateTime *cur_time;
+
+    cur_time =
+        gst_date_time_new_from_g_date_time
+        (gst_adaptive_demux2_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST
+            (dashdemux)));
+    diff =
+        gst_mpd_client2_calculate_time_difference (cur_time,
+        segmentAvailability);
+    gst_date_time_unref (segmentAvailability);
+    gst_date_time_unref (cur_time);
+    /* subtract the server's clock drift, so that if the server's
+       time is behind our idea of UTC, we need to sleep for longer
+       before requesting a fragment */
+    clock_compensation =
+        gst_dash_demux_get_clock_compensation (dashdemux) * GST_USECOND;
+
+    if (diff > clock_compensation)
+      return (diff - clock_compensation);
+  }
+  return 0;
+}
+
+static gboolean
+gst_dash_demux_has_next_period (GstAdaptiveDemux * demux)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+  if (demux->segment.rate >= 0)
+    return gst_mpd_client2_has_next_period (dashdemux->client);
+  else
+    return gst_mpd_client2_has_previous_period (dashdemux->client);
+}
+
+static void
+gst_dash_demux_advance_period (GstAdaptiveDemux * demux)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+
+  if (demux->segment.rate >= 0) {
+    if (!gst_mpd_client2_set_period_index (dashdemux->client,
+            gst_mpd_client2_get_period_index (dashdemux->client) + 1)) {
+      /* TODO error */
+      return;
+    }
+  } else {
+    if (!gst_mpd_client2_set_period_index (dashdemux->client,
+            gst_mpd_client2_get_period_index (dashdemux->client) - 1)) {
+      /* TODO error */
+      return;
+    }
+  }
+
+  gst_dash_demux_setup_all_streams (dashdemux);
+  gst_mpd_client2_seek_to_first_segment (dashdemux->client);
+}
+
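+/* Split @buffer at @offset: the returned buffer holds the data from @offset
+ * up to @size (to the end when @size is -1), while @buffer is truncated to
+ * its first @offset bytes */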
+static GstBuffer *
+_gst_buffer_split (GstBuffer * buffer, gint offset, gsize size)
+{
+  GstBuffer *newbuf = gst_buffer_copy_region (buffer,
+      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_META
+      | GST_BUFFER_COPY_MEMORY, offset, size == -1 ? size : size - offset);
+
+  gst_buffer_resize (buffer, 0, offset);
+
+  return newbuf;
+}
+
+static gboolean
+gst_dash_demux_stream_fragment_start (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+
+  GST_LOG_OBJECT (stream, "Actual position %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (dashstream->actual_position));
+
+  dashstream->current_index_header_or_data = 0;
+  dashstream->current_offset = -1;
+
+  /* We need to mark every first buffer of a key unit as discont,
+   * and also every first buffer of a moov and moof. This ensures
+   * that qtdemux takes note of our buffer offsets for each of those
+   * buffers instead of keeping track of them itself from the first
+   * buffer. We need offsets to be consistent between moof and mdat
+   */
+  if (dashstream->is_isobmff && dashdemux->allow_trickmode_key_units
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)
+      && dashstream->active_stream->mimeType == GST_STREAM_VIDEO)
+    stream->discont = TRUE;
+
+  return TRUE;
+}
+
+static GstFlowReturn
+gst_dash_demux_stream_fragment_finished (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+
+  /* We need to mark every first buffer of a key unit as discont,
+   * and also every first buffer of a moov and moof. This ensures
+   * that qtdemux takes note of our buffer offsets for each of those
+   * buffers instead of keeping track of them itself from the first
+   * buffer. We need offsets to be consistent between moof and mdat
+   */
+  if (dashstream->is_isobmff && dashdemux->allow_trickmode_key_units
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (demux)
+      && dashstream->active_stream->mimeType == GST_STREAM_VIDEO)
+    stream->discont = TRUE;
+
+  /* Only handle fragment advancing specifically for SIDX if we're not
+   * in key unit mode */
+  if (!(dashstream->moof_sync_samples
+          && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (dashdemux))
+      && gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client)
+      && dashstream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+    /* fragment is advanced on data_received when byte limits are reached */
+    if (dashstream->pending_seek_ts != GST_CLOCK_TIME_NONE) {
+      if (SIDX (dashstream)->entry_index < SIDX (dashstream)->entries_count)
+        return GST_FLOW_OK;
+    } else if (gst_dash_demux_stream_has_next_subfragment (stream)) {
+      return GST_FLOW_OK;
+    }
+  }
+
+  if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
+    return GST_FLOW_OK;
+
+  return gst_adaptive_demux2_stream_advance_fragment (demux, stream,
+      stream->fragment.duration);
+}
+
+static gboolean
+gst_dash_demux_need_another_chunk (GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2 *dashdemux = (GstDashDemux2 *) stream->demux;
+  GstAdaptiveDemux *demux = stream->demux;
+  GstDashDemux2Stream *dashstream = (GstDashDemux2Stream *) stream;
+  gboolean playing_forward = (demux->segment.rate > 0.0);
+
+  /* We're chunked downloading for ISOBMFF in KEY_UNITS mode for the actual
+   * fragment until we parsed the moof and arrived at the mdat. 8192 is a
+   * random guess for the moof size
+   */
+  if (dashstream->is_isobmff
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)
+      && dashstream->active_stream->mimeType == GST_STREAM_VIDEO
+      && !stream->downloading_header && !stream->downloading_index
+      && dashdemux->allow_trickmode_key_units) {
+    if (dashstream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT) {
+      /* Need to download the moof first to know anything */
+
+      stream->fragment.chunk_size = 8192;
+      /* Do we have the first fourcc already or are we in the middle */
+      if (dashstream->isobmff_parser.current_fourcc == 0) {
+        stream->fragment.chunk_size += dashstream->moof_average_size;
+        if (dashstream->first_sync_sample_always_after_moof) {
+          gboolean first = FALSE;
+          /* Check if we'll really need that first sample */
+          if (GST_CLOCK_TIME_IS_VALID (dashstream->target_time)) {
+            first =
+                ((dashstream->target_time -
+                    dashstream->current_fragment_timestamp) /
+                dashstream->keyframe_average_distance) == 0 ? TRUE : FALSE;
+          } else if (playing_forward) {
+            first = TRUE;
+          }
+
+          if (first)
+            stream->fragment.chunk_size += dashstream->keyframe_average_size;
+        }
+      }
+
+      if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client) &&
+          dashstream->sidx_parser.sidx.entries) {
+        guint64 sidx_start_offset =
+            dashstream->sidx_base_offset +
+            SIDX_CURRENT_ENTRY (dashstream)->offset;
+        guint64 sidx_end_offset =
+            sidx_start_offset + SIDX_CURRENT_ENTRY (dashstream)->size;
+        guint64 downloaded_end_offset;
+
+        if (dashstream->current_offset == GST_CLOCK_TIME_NONE) {
+          downloaded_end_offset = sidx_start_offset;
+        } else {
+          downloaded_end_offset =
+              dashstream->current_offset +
+              gst_adapter_available (dashstream->adapter);
+        }
+
+        downloaded_end_offset = MAX (downloaded_end_offset, sidx_start_offset);
+
+        if (stream->fragment.chunk_size +
+            downloaded_end_offset > sidx_end_offset) {
+          stream->fragment.chunk_size = sidx_end_offset - downloaded_end_offset;
+        }
+      }
+    } else if (dashstream->moof && dashstream->moof_sync_samples) {
+      /* Have the moof, either we're done now or we want to download the
+       * directly following sync sample */
+      if (dashstream->first_sync_sample_after_moof
+          && dashstream->current_sync_sample == 0) {
+        GstDashStreamSyncSample *sync_sample =
+            &g_array_index (dashstream->moof_sync_samples,
+            GstDashStreamSyncSample, 0);
+        guint64 end_offset = sync_sample->end_offset + 1;
+        guint64 downloaded_end_offset;
+
+        downloaded_end_offset =
+            dashstream->current_offset +
+            gst_adapter_available (dashstream->adapter);
+
+        if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client) &&
+            dashstream->sidx_parser.sidx.entries) {
+          guint64 sidx_end_offset =
+              dashstream->sidx_base_offset +
+              SIDX_CURRENT_ENTRY (dashstream)->offset +
+              SIDX_CURRENT_ENTRY (dashstream)->size;
+
+          if (end_offset > sidx_end_offset) {
+            end_offset = sidx_end_offset;
+          }
+        }
+
+        if (downloaded_end_offset < end_offset) {
+          stream->fragment.chunk_size = end_offset - downloaded_end_offset;
+        } else {
+          stream->fragment.chunk_size = 0;
+        }
+      } else {
+        stream->fragment.chunk_size = 0;
+      }
+    } else {
+      /* Have moof but can't do key-units mode, just download until the end */
+      stream->fragment.chunk_size = -1;
+    }
+  } else {
+    /* We might've decided that we can't allow key-unit only
+     * trickmodes while doing chunked downloading. In that case
+     * just download from here to the end now */
+    if (dashstream->moof
+        && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
+      stream->fragment.chunk_size = -1;
+    } else {
+      stream->fragment.chunk_size = 0;
+    }
+  }
+
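+  /* A remaining chunk_size of 0 means no further chunk is needed for this
+   * fragment */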
+  return stream->fragment.chunk_size != 0;
+}
+
+static GstFlowReturn
+gst_dash_demux_parse_isobmff (GstAdaptiveDemux * demux,
+    GstDashDemux2Stream * dash_stream, gboolean * sidx_seek_needed)
+{
+  GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) dash_stream;
+  GstDashDemux2 *dashdemux = GST_DASH_DEMUX_CAST (demux);
+  gsize available;
+  GstBuffer *buffer;
+  GstMapInfo map;
+  GstByteReader reader;
+  guint32 fourcc;
+  guint header_size;
+  guint64 size, buffer_offset;
+
+  *sidx_seek_needed = FALSE;
+
+  /* This must not be called when we're in the mdat. We only look at the mdat
+   * header and then stop parsing the boxes as we're only interested in the
+   * metadata! Handling mdat is the job of the surrounding code, as well as
+   * stopping or starting the next fragment when mdat is over (=> sidx)
+   */
+  g_assert (dash_stream->isobmff_parser.current_fourcc !=
+      GST_ISOFF_FOURCC_MDAT);
+
+  available = gst_adapter_available (dash_stream->adapter);
+  buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+  buffer_offset = dash_stream->current_offset;
+
+  /* Always at the start of a box here */
+  g_assert (dash_stream->isobmff_parser.current_size == 0);
+
+  /* At the start of a box => Parse it */
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  gst_byte_reader_init (&reader, map.data, map.size);
+
+  /* While there are more boxes left to parse ... */
+  dash_stream->isobmff_parser.current_start_offset = buffer_offset;
+  do {
+    dash_stream->isobmff_parser.current_fourcc = 0;
+    dash_stream->isobmff_parser.current_size = 0;
+
+    if (!gst_isoff_parse_box_header (&reader, &fourcc, NULL, &header_size,
+            &size)) {
+      break;
+    }
+
+    dash_stream->isobmff_parser.current_fourcc = fourcc;
+    if (size == 0) {
+      /* We assume this is mdat, anything else with "size until end"
+       * does not seem to make sense */
+      g_assert (dash_stream->isobmff_parser.current_fourcc ==
+          GST_ISOFF_FOURCC_MDAT);
+      dash_stream->isobmff_parser.current_size = -1;
+      break;
+    }
+
+    dash_stream->isobmff_parser.current_size = size;
+
+    /* Do we have the complete box or are at MDAT */
+    if (gst_byte_reader_get_remaining (&reader) < size - header_size ||
+        dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MDAT) {
+      /* Reset byte reader to the beginning of the box */
+      gst_byte_reader_set_pos (&reader,
+          gst_byte_reader_get_pos (&reader) - header_size);
+      break;
+    }
+
+    GST_LOG_OBJECT (stream,
+        "box %" GST_FOURCC_FORMAT " at offset %" G_GUINT64_FORMAT " size %"
+        G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc),
+        dash_stream->isobmff_parser.current_start_offset, size);
+
+    if (dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MOOF) {
+      GstByteReader sub_reader;
+
+      /* Only allow SIDX before the very first moof */
+      dash_stream->allow_sidx = FALSE;
+
+      g_assert (dash_stream->moof == NULL);
+      g_assert (dash_stream->moof_sync_samples == NULL);
+      gst_byte_reader_get_sub_reader (&reader, &sub_reader, size - header_size);
+      dash_stream->moof = gst_isoff_moof_box_parse (&sub_reader);
+      dash_stream->moof_offset =
+          dash_stream->isobmff_parser.current_start_offset;
+      dash_stream->moof_size = size;
+      dash_stream->current_sync_sample = -1;
+
+      if (dash_stream->moof_average_size) {
+        if (dash_stream->moof_average_size < size)
+          dash_stream->moof_average_size =
+              (size * 3 + dash_stream->moof_average_size) / 4;
+        else
+          dash_stream->moof_average_size =
+              (size + dash_stream->moof_average_size * 3) / 4;
+      } else {
+        dash_stream->moof_average_size = size;
+      }
+    } else if (dash_stream->isobmff_parser.current_fourcc ==
+        GST_ISOFF_FOURCC_SIDX &&
+        gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client) &&
+        dash_stream->allow_sidx) {
+      GstByteReader sub_reader;
+      GstIsoffParserResult res;
+      guint dummy;
+
+      dash_stream->sidx_base_offset =
+          dash_stream->isobmff_parser.current_start_offset + size;
+      dash_stream->allow_sidx = FALSE;
+
+      gst_byte_reader_get_sub_reader (&reader, &sub_reader, size - header_size);
+
+      res =
+          gst_isoff_sidx_parser_parse (&dash_stream->sidx_parser, &sub_reader,
+          &dummy);
+
+      if (res == GST_ISOFF_PARSER_DONE) {
+        guint64 first_offset = dash_stream->sidx_parser.sidx.first_offset;
+        GstSidxBox *sidx = SIDX (dash_stream);
+        guint i;
+
+        if (first_offset) {
+          GST_LOG_OBJECT (stream,
+              "non-zero sidx first offset %" G_GUINT64_FORMAT, first_offset);
+          dash_stream->sidx_base_offset += first_offset;
+        }
+
+        for (i = 0; i < sidx->entries_count; i++) {
+          GstSidxBoxEntry *entry = &sidx->entries[i];
+
+          if (entry->ref_type != 0) {
+            GST_FIXME_OBJECT (stream, "SIDX ref_type 1 not supported yet");
+            dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
+            gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
+            break;
+          }
+        }
+
+        /* We might've cleared the index above */
+        if (sidx->entries_count > 0) {
+          if (GST_CLOCK_TIME_IS_VALID (dash_stream->pending_seek_ts)) {
+            /* FIXME, preserve seek flags */
+            if (gst_dash_demux_stream_sidx_seek (dash_stream,
+                    demux->segment.rate >= 0, 0, dash_stream->pending_seek_ts,
+                    NULL) != GST_FLOW_OK) {
+              GST_ERROR_OBJECT (stream, "Couldn't find position in sidx");
+              dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
+              gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
+            }
+            dash_stream->pending_seek_ts = GST_CLOCK_TIME_NONE;
+          } else {
+
+            if (dash_stream->sidx_position == GST_CLOCK_TIME_NONE) {
+              SIDX (dash_stream)->entry_index = 0;
+            } else {
+              if (gst_dash_demux_stream_sidx_seek (dash_stream,
+                      demux->segment.rate >= 0, GST_SEEK_FLAG_SNAP_BEFORE,
+                      dash_stream->sidx_position, NULL) != GST_FLOW_OK) {
+                GST_ERROR_OBJECT (stream, "Couldn't find position in sidx");
+                dash_stream->sidx_position = GST_CLOCK_TIME_NONE;
+                gst_isoff_sidx_parser_clear (&dash_stream->sidx_parser);
+              }
+            }
+            dash_stream->sidx_position =
+                SIDX (dash_stream)->entries[SIDX (dash_stream)->entry_index].
+                pts;
+          }
+        }
+
+        if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED &&
+            SIDX (dash_stream)->entry_index != 0) {
+          /* Need to jump to the requested SIDX entry. Push everything up to
+           * the SIDX box below and let the caller handle everything else */
+          *sidx_seek_needed = TRUE;
+          break;
+        }
+      }
+    } else {
+      gst_byte_reader_skip (&reader, size - header_size);
+    }
+
+    dash_stream->isobmff_parser.current_fourcc = 0;
+    dash_stream->isobmff_parser.current_start_offset += size;
+    dash_stream->isobmff_parser.current_size = 0;
+  } while (gst_byte_reader_get_remaining (&reader) > 0);
+
+  gst_buffer_unmap (buffer, &map);
+
+  /* mdat? Push all we have and wait for it to be over */
+  if (dash_stream->isobmff_parser.current_fourcc == GST_ISOFF_FOURCC_MDAT) {
+    GstBuffer *pending;
+
+    GST_LOG_OBJECT (stream,
+        "box %" GST_FOURCC_FORMAT " at offset %" G_GUINT64_FORMAT " size %"
+        G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc),
+        dash_stream->isobmff_parser.current_start_offset,
+        dash_stream->isobmff_parser.current_size);
+
+    /* At mdat. Move the start of the mdat to the adapter and have everything
+     * else be pushed. We parsed all header boxes at this point and are not
+     * supposed to be called again until the next moof */
+    pending = _gst_buffer_split (buffer, gst_byte_reader_get_pos (&reader), -1);
+    gst_adapter_push (dash_stream->adapter, pending);
+    dash_stream->current_offset += gst_byte_reader_get_pos (&reader);
+    dash_stream->isobmff_parser.current_size = 0;
+
+    GST_BUFFER_OFFSET (buffer) = buffer_offset;
+    GST_BUFFER_OFFSET_END (buffer) =
+        buffer_offset + gst_buffer_get_size (buffer);
+    return gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+  } else if (gst_byte_reader_get_pos (&reader) != 0) {
+    GstBuffer *pending;
+
+    /* Multiple complete boxes and no mdat? Push them and keep the remainder,
+     * which is the start of the next box, if anything remains */
+
+    pending = _gst_buffer_split (buffer, gst_byte_reader_get_pos (&reader), -1);
+    gst_adapter_push (dash_stream->adapter, pending);
+    dash_stream->current_offset += gst_byte_reader_get_pos (&reader);
+    dash_stream->isobmff_parser.current_size = 0;
+
+    GST_BUFFER_OFFSET (buffer) = buffer_offset;
+    GST_BUFFER_OFFSET_END (buffer) =
+        buffer_offset + gst_buffer_get_size (buffer);
+    return gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+  }
+
+  /* Not even a single complete, non-mdat box, wait */
+  dash_stream->isobmff_parser.current_size = 0;
+  gst_adapter_push (dash_stream->adapter, buffer);
+
+  return GST_FLOW_OK;
+}
+
+static gboolean
+gst_dash_demux_find_sync_samples (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2 *dashdemux = (GstDashDemux2 *) stream->demux;
+  GstDashDemux2Stream *dash_stream = (GstDashDemux2Stream *) stream;
+  guint i;
+  guint32 track_id = 0;
+  guint64 prev_traf_end;
+  gboolean trex_sample_flags = FALSE;
+
+  if (!dash_stream->moof) {
+    dashdemux->allow_trickmode_key_units = FALSE;
+    return FALSE;
+  }
+
+  dash_stream->current_sync_sample = -1;
+  dash_stream->moof_sync_samples =
+      g_array_new (FALSE, FALSE, sizeof (GstDashStreamSyncSample));
+
+  prev_traf_end = dash_stream->moof_offset;
+
+  /* generate table of keyframes and offsets */
+  for (i = 0; i < dash_stream->moof->traf->len; i++) {
+    GstTrafBox *traf = &g_array_index (dash_stream->moof->traf, GstTrafBox, i);
+    guint64 traf_offset = 0, prev_trun_end;
+    guint j;
+
+    if (i == 0) {
+      track_id = traf->tfhd.track_id;
+    } else if (track_id != traf->tfhd.track_id) {
+      GST_ERROR_OBJECT (stream,
+          "moof with trafs of different track ids (%u != %u)", track_id,
+          traf->tfhd.track_id);
+      g_array_free (dash_stream->moof_sync_samples, TRUE);
+      dash_stream->moof_sync_samples = NULL;
+      dashdemux->allow_trickmode_key_units = FALSE;
+      return FALSE;
+    }
+
+    if (traf->tfhd.flags & GST_TFHD_FLAGS_BASE_DATA_OFFSET_PRESENT) {
+      traf_offset = traf->tfhd.base_data_offset;
+    } else if (traf->tfhd.flags & GST_TFHD_FLAGS_DEFAULT_BASE_IS_MOOF) {
+      traf_offset = dash_stream->moof_offset;
+    } else {
+      traf_offset = prev_traf_end;
+    }
+
+    prev_trun_end = traf_offset;
+
+    for (j = 0; j < traf->trun->len; j++) {
+      GstTrunBox *trun = &g_array_index (traf->trun, GstTrunBox, j);
+      guint64 trun_offset, prev_sample_end;
+      guint k;
+
+      if (trun->flags & GST_TRUN_FLAGS_DATA_OFFSET_PRESENT) {
+        trun_offset = traf_offset + trun->data_offset;
+      } else {
+        trun_offset = prev_trun_end;
+      }
+
+      prev_sample_end = trun_offset;
+      for (k = 0; k < trun->samples->len; k++) {
+        GstTrunSample *sample =
+            &g_array_index (trun->samples, GstTrunSample, k);
+        guint64 sample_offset;
+        guint32 sample_flags;
+#if 0
+        guint32 sample_duration;
+#endif
+
+        sample_offset = prev_sample_end;
+
+        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_FLAGS_PRESENT) {
+          sample_flags = sample->sample_flags;
+        } else if ((trun->flags & GST_TRUN_FLAGS_FIRST_SAMPLE_FLAGS_PRESENT)
+            && k == 0) {
+          sample_flags = trun->first_sample_flags;
+        } else if (traf->tfhd.
+            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_FLAGS_PRESENT) {
+          sample_flags = traf->tfhd.default_sample_flags;
+        } else {
+          trex_sample_flags = TRUE;
+          continue;
+        }
+
+#if 0
+        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_DURATION_PRESENT) {
+          sample_duration = sample->sample_duration;
+        } else if (traf->tfhd.
+            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_DURATION_PRESENT) {
+          sample_duration = traf->tfhd.default_sample_duration;
+        } else {
+          GST_FIXME_OBJECT (stream,
+              "Sample duration given by trex - can't download only keyframes");
+          g_array_free (dash_stream->moof_sync_samples, TRUE);
+          dash_stream->moof_sync_samples = NULL;
+          return FALSE;
+        }
+#endif
+
+        if (trun->flags & GST_TRUN_FLAGS_SAMPLE_SIZE_PRESENT) {
+          prev_sample_end += sample->sample_size;
+        } else if (traf->tfhd.
+            flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_SIZE_PRESENT) {
+          prev_sample_end += traf->tfhd.default_sample_size;
+        } else {
+          GST_FIXME_OBJECT (stream,
+              "Sample size given by trex - can't download only keyframes");
+          g_array_free (dash_stream->moof_sync_samples, TRUE);
+          dash_stream->moof_sync_samples = NULL;
+          dashdemux->allow_trickmode_key_units = FALSE;
+          return FALSE;
+        }
+
+        /* Non-non-sync sample aka sync sample */
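+        /* sample_depends_on == 2 means the sample does not depend on other
+         * samples (an I-frame), so treat it as a sync sample as well */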
+        if (!GST_ISOFF_SAMPLE_FLAGS_SAMPLE_IS_NON_SYNC_SAMPLE (sample_flags) ||
+            GST_ISOFF_SAMPLE_FLAGS_SAMPLE_DEPENDS_ON (sample_flags) == 2) {
+          GstDashStreamSyncSample sync_sample =
+              { sample_offset, prev_sample_end - 1 };
+          /* TODO: need timestamps so we can decide to download or not */
+          g_array_append_val (dash_stream->moof_sync_samples, sync_sample);
+        }
+      }
+
+      prev_trun_end = prev_sample_end;
+    }
+
+    prev_traf_end = prev_trun_end;
+  }
+
+  if (trex_sample_flags) {
+    if (dash_stream->moof_sync_samples->len > 0) {
+      GST_LOG_OBJECT (stream,
+          "Some sample flags given by trex but still found sync samples");
+    } else {
+      GST_FIXME_OBJECT (stream,
+          "Sample flags given by trex - can't download only keyframes");
+      g_array_free (dash_stream->moof_sync_samples, TRUE);
+      dash_stream->moof_sync_samples = NULL;
+      dashdemux->allow_trickmode_key_units = FALSE;
+      return FALSE;
+    }
+  }
+
+  if (dash_stream->moof_sync_samples->len == 0) {
+    GST_LOG_OBJECT (stream, "No sync samples found in fragment");
+    g_array_free (dash_stream->moof_sync_samples, TRUE);
+    dash_stream->moof_sync_samples = NULL;
+    dashdemux->allow_trickmode_key_units = FALSE;
+    return FALSE;
+  }
+
+  {
+    GstDashStreamSyncSample *sync_sample;
+    guint i;
+    guint size;
+    GstClockTime current_keyframe_distance;
+
+    for (i = 0; i < dash_stream->moof_sync_samples->len; i++) {
+      sync_sample =
+          &g_array_index (dash_stream->moof_sync_samples,
+          GstDashStreamSyncSample, i);
+      size = sync_sample->end_offset + 1 - sync_sample->start_offset;
+
+      if (dash_stream->keyframe_average_size) {
+        /* Over-estimate the keyframe size */
+        if (dash_stream->keyframe_average_size < size)
+          dash_stream->keyframe_average_size =
+              (size * 3 + dash_stream->keyframe_average_size) / 4;
+        else
+          dash_stream->keyframe_average_size =
+              (size + dash_stream->keyframe_average_size * 3) / 4;
+      } else {
+        dash_stream->keyframe_average_size = size;
+      }
+
+      if (i == 0) {
+        if (dash_stream->moof_offset + dash_stream->moof_size + 8 <
+            sync_sample->start_offset) {
+          dash_stream->first_sync_sample_after_moof = FALSE;
+          dash_stream->first_sync_sample_always_after_moof = FALSE;
+        } else {
+          dash_stream->first_sync_sample_after_moof =
+              (dash_stream->moof_sync_samples->len == 1
+              || demux->segment.rate > 0.0);
+        }
+      }
+    }
+
+    g_assert (stream->fragment.duration != 0);
+    g_assert (stream->fragment.duration != GST_CLOCK_TIME_NONE);
+
+    if (gst_mpd_client2_has_isoff_ondemand_profile (dashdemux->client)
+        && dash_stream->sidx_position != GST_CLOCK_TIME_NONE
+        && SIDX (dash_stream)->entries) {
+      GstSidxBoxEntry *entry = SIDX_CURRENT_ENTRY (dash_stream);
+      current_keyframe_distance =
+          entry->duration / dash_stream->moof_sync_samples->len;
+    } else {
+      current_keyframe_distance =
+          stream->fragment.duration / dash_stream->moof_sync_samples->len;
+    }
+    dash_stream->current_fragment_keyframe_distance = current_keyframe_distance;
+
+    if (dash_stream->keyframe_average_distance) {
+      /* Under-estimate the keyframe distance */
+      if (dash_stream->keyframe_average_distance > current_keyframe_distance)
+        dash_stream->keyframe_average_distance =
+            (dash_stream->keyframe_average_distance * 3 +
+            current_keyframe_distance) / 4;
+      else
+        dash_stream->keyframe_average_distance =
+            (dash_stream->keyframe_average_distance +
+            current_keyframe_distance * 3) / 4;
+    } else {
+      dash_stream->keyframe_average_distance = current_keyframe_distance;
+    }
+
+    GST_DEBUG_OBJECT (stream,
+        "average keyframe sample size: %" G_GUINT64_FORMAT,
+        dash_stream->keyframe_average_size);
+    GST_DEBUG_OBJECT (stream,
+        "average keyframe distance: %" GST_TIME_FORMAT " (%" GST_TIME_FORMAT
+        ")", GST_TIME_ARGS (dash_stream->keyframe_average_distance),
+        GST_TIME_ARGS (current_keyframe_distance));
+    GST_DEBUG_OBJECT (stream, "first sync sample after moof: %d",
+        dash_stream->first_sync_sample_after_moof);
+  }
+
+  return TRUE;
+}
+
+
+static GstFlowReturn
+gst_dash_demux_handle_isobmff (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstDashDemux2Stream *dash_stream = (GstDashDemux2Stream *) stream;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstBuffer *buffer;
+  gboolean sidx_advance = FALSE;
+
+  /* We parse all ISOBMFF boxes of a (sub)fragment until the mdat. This covers
+   * at least moov, moof and sidx boxes. Once mdat is received we just output
+   * everything until the next (sub)fragment */
+  if (dash_stream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT) {
+    gboolean sidx_seek_needed = FALSE;
+
+    ret = gst_dash_demux_parse_isobmff (demux, dash_stream, &sidx_seek_needed);
+    if (ret != GST_FLOW_OK)
+      return ret;
+
+    /* Go to selected segment if needed here */
+    if (sidx_seek_needed && !stream->downloading_index)
+      return GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT;
+
+    /* No mdat yet, let's get called again with the next boxes */
+    if (dash_stream->isobmff_parser.current_fourcc != GST_ISOFF_FOURCC_MDAT)
+      return ret;
+
+    /* Here we end up only if we're right at the mdat start */
+
+    /* Jump to the next sync sample. As we're doing chunked downloading
+     * here, just drop data until our chunk is over so we can reuse the
+     * HTTP connection instead of having to create a new one, or reuse
+     * the data directly if the sync sample follows the moof */
+    if (dash_stream->active_stream->mimeType == GST_STREAM_VIDEO
+        && gst_dash_demux_find_sync_samples (demux, stream) &&
+        GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
+      guint idx = -1;
+      gboolean playing_forward = (demux->segment.rate > 0.0);
+
+      if (GST_CLOCK_TIME_IS_VALID (dash_stream->target_time)) {
+        idx =
+            (dash_stream->target_time -
+            dash_stream->current_fragment_timestamp) /
+            dash_stream->current_fragment_keyframe_distance;
+      } else if (playing_forward) {
+        idx = 0;
+      }
+
+      GST_DEBUG_OBJECT (stream,
+          "target %" GST_TIME_FORMAT " idx %d",
+          GST_TIME_ARGS (dash_stream->target_time), idx);
+      /* Figure out target time */
+
+      if (dash_stream->first_sync_sample_after_moof && idx == 0) {
+        /* If we're here, don't throw away data but collect sync
+         * sample while we're at it below. We're doing chunked
+         * downloading so might need to adjust the next chunk size for
+         * the remainder */
+        dash_stream->current_sync_sample = 0;
+        GST_DEBUG_OBJECT (stream, "Using first keyframe after header");
+      }
+    }
+
+    if (gst_adapter_available (dash_stream->adapter) == 0)
+      return ret;
+
+    /* We have some data from the mdat available in the adapter, handle it
+     * below in the push code */
+  } else {
+    /* Somewhere in the middle of the mdat */
+  }
+
+  /* At mdat */
+  if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+    guint64 sidx_end_offset =
+        dash_stream->sidx_base_offset +
+        SIDX_CURRENT_ENTRY (dash_stream)->offset +
+        SIDX_CURRENT_ENTRY (dash_stream)->size;
+    gboolean has_next = gst_dash_demux_stream_has_next_subfragment (stream);
+    gsize available;
+
+    /* Need to handle everything in the adapter according to the parsed SIDX
+     * and advance subsegments accordingly */
+
+    available = gst_adapter_available (dash_stream->adapter);
+    if (dash_stream->current_offset + available < sidx_end_offset) {
+      buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+    } else if (!has_next && sidx_end_offset <= dash_stream->current_offset) {
+      /* Drain all bytes, since there might be trailing bytes at the end of subfragment */
+      buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+    } else if (sidx_end_offset <= dash_stream->current_offset) {
+      /* This means a corrupted stream or a bug: ignoring bugs, it
+       * should only happen if the SIDX index is corrupt */
+      GST_ERROR_OBJECT (stream, "Invalid SIDX state. "
+          " sidx_end_offset %" G_GUINT64_FORMAT " current offset %"
+          G_GUINT64_FORMAT, sidx_end_offset, dash_stream->current_offset);
+      gst_adapter_clear (dash_stream->adapter);
+      return GST_FLOW_ERROR;
+    } else {
+      buffer =
+          gst_adapter_take_buffer (dash_stream->adapter,
+          sidx_end_offset - dash_stream->current_offset);
+      sidx_advance = TRUE;
+    }
+  } else {
+    /* Take it all and handle it further below */
+    buffer =
+        gst_adapter_take_buffer (dash_stream->adapter,
+        gst_adapter_available (dash_stream->adapter));
+
+    /* Attention: All code paths below need to update dash_stream->current_offset */
+  }
+
+  /* We're actually running in key-units trick mode */
+  if (dash_stream->active_stream->mimeType == GST_STREAM_VIDEO
+      && dash_stream->moof_sync_samples
+      && GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS (stream->demux)) {
+    if (dash_stream->current_sync_sample == -1) {
+      /* We're doing chunked downloading and wait for finishing the current
+       * chunk so we can jump to the first keyframe */
+      dash_stream->current_offset += gst_buffer_get_size (buffer);
+      gst_buffer_unref (buffer);
+      return GST_FLOW_OK;
+    } else {
+      GstDashStreamSyncSample *sync_sample =
+          &g_array_index (dash_stream->moof_sync_samples,
+          GstDashStreamSyncSample, dash_stream->current_sync_sample);
+      guint64 end_offset =
+          dash_stream->current_offset + gst_buffer_get_size (buffer);
+
+      /* Make sure to not download too much, this should only happen for
+       * the very first keyframe if it follows the moof */
+      if (dash_stream->current_offset >= sync_sample->end_offset + 1) {
+        dash_stream->current_offset += gst_buffer_get_size (buffer);
+        gst_buffer_unref (buffer);
+        return GST_FLOW_OK;
+      } else if (end_offset > sync_sample->end_offset + 1) {
+        guint64 remaining =
+            sync_sample->end_offset + 1 - dash_stream->current_offset;
+        GstBuffer *sub = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0,
+            remaining);
+        gst_buffer_unref (buffer);
+        buffer = sub;
+      }
+    }
+  }
+
+  GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
+  dash_stream->current_offset += gst_buffer_get_size (buffer);
+  GST_BUFFER_OFFSET_END (buffer) = dash_stream->current_offset;
+
+  ret = gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (sidx_advance) {
+    ret =
+        gst_adaptive_demux2_stream_advance_fragment (demux, stream,
+        SIDX_CURRENT_ENTRY (dash_stream)->duration);
+    if (ret != GST_FLOW_OK)
+      return ret;
+
+    /* If we still have data available, recurse and use it up if possible */
+    if (gst_adapter_available (dash_stream->adapter) > 0)
+      return gst_dash_demux_handle_isobmff (demux, stream);
+  }
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_dash_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer)
+{
+  GstDashDemux2Stream *dash_stream = (GstDashDemux2Stream *) stream;
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint index_header_or_data;
+
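+  /* Classify this download: 1 = index, 2 = header, 3 = media data */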
+  if (stream->downloading_index)
+    index_header_or_data = 1;
+  else if (stream->downloading_header)
+    index_header_or_data = 2;
+  else
+    index_header_or_data = 3;
+
+  if (dash_stream->current_index_header_or_data != index_header_or_data) {
+    /* Clear pending data */
+    if (gst_adapter_available (dash_stream->adapter) != 0)
+      GST_ERROR_OBJECT (stream,
+          "Had pending SIDX data after switch between index/header/data");
+    gst_adapter_clear (dash_stream->adapter);
+    dash_stream->current_index_header_or_data = index_header_or_data;
+    dash_stream->current_offset = -1;
+  }
+
+  if (dash_stream->current_offset == -1)
+    dash_stream->current_offset =
+        GST_BUFFER_OFFSET_IS_VALID (buffer) ? GST_BUFFER_OFFSET (buffer) : 0;
+
+  gst_adapter_push (dash_stream->adapter, buffer);
+  buffer = NULL;
+
+  if (dash_stream->is_isobmff || stream->downloading_index) {
+    /* The SIDX index is also ISOBMFF */
+    ret = gst_dash_demux_handle_isobmff (demux, stream);
+  } else if (dash_stream->sidx_parser.status == GST_ISOFF_SIDX_PARSER_FINISHED) {
+    gsize available;
+
+    /* Not ISOBMFF but had a SIDX index. Does this even exist or work? */
+    while (ret == GST_FLOW_OK
+        && ((available = gst_adapter_available (dash_stream->adapter)) > 0)) {
+      gboolean advance = FALSE;
+      guint64 sidx_end_offset =
+          dash_stream->sidx_base_offset +
+          SIDX_CURRENT_ENTRY (dash_stream)->offset +
+          SIDX_CURRENT_ENTRY (dash_stream)->size;
+      gboolean has_next = gst_dash_demux_stream_has_next_subfragment (stream);
+
+      if (dash_stream->current_offset + available < sidx_end_offset) {
+        buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+      } else {
+        if (!has_next && sidx_end_offset <= dash_stream->current_offset) {
+          /* Drain all bytes, since there might be trailing bytes at the end of the subfragment */
+          buffer = gst_adapter_take_buffer (dash_stream->adapter, available);
+        } else {
+          if (sidx_end_offset <= dash_stream->current_offset) {
+            /* This means a corrupted stream or a bug: ignoring bugs, it
+             * should only happen if the SIDX index is corrupt */
+            GST_ERROR_OBJECT (stream, "Invalid SIDX state");
+            gst_adapter_clear (dash_stream->adapter);
+            ret = GST_FLOW_ERROR;
+            break;
+          } else {
+            buffer =
+                gst_adapter_take_buffer (dash_stream->adapter,
+                sidx_end_offset - dash_stream->current_offset);
+            advance = TRUE;
+          }
+        }
+      }
+
+      GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
+      GST_BUFFER_OFFSET_END (buffer) =
+          GST_BUFFER_OFFSET (buffer) + gst_buffer_get_size (buffer);
+      dash_stream->current_offset = GST_BUFFER_OFFSET_END (buffer);
+
+      ret = gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+
+      if (advance) {
+        if (has_next) {
+          GstFlowReturn new_ret;
+          new_ret =
+              gst_adaptive_demux2_stream_advance_fragment (demux, stream,
+              SIDX_CURRENT_ENTRY (dash_stream)->duration);
+
+          /* only overwrite if it was OK before */
+          if (ret == GST_FLOW_OK)
+            ret = new_ret;
+        } else {
+          break;
+        }
+      }
+    }
+  } else {
+    /* this should be the main header, just push it all */
+    buffer = gst_adapter_take_buffer (dash_stream->adapter,
+        gst_adapter_available (dash_stream->adapter));
+
+    GST_BUFFER_OFFSET (buffer) = dash_stream->current_offset;
+    GST_BUFFER_OFFSET_END (buffer) =
+        GST_BUFFER_OFFSET (buffer) + gst_buffer_get_size (buffer);
+    dash_stream->current_offset = GST_BUFFER_OFFSET_END (buffer);
+
+    ret = gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+  }
+
+  return ret;
+}
+
+static GstDashDemux2ClockDrift *
+gst_dash_demux_clock_drift_new (GstDashDemux2 * demux)
+{
+  GstDashDemux2ClockDrift *clock_drift;
+
+  clock_drift = g_slice_new0 (GstDashDemux2ClockDrift);
+  g_mutex_init (&clock_drift->clock_lock);
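+  /* Set next_update to "now" so the first clock poll is not skipped */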
+  clock_drift->next_update =
+      GST_TIME_AS_USECONDS (gst_adaptive_demux2_get_monotonic_time
+      (GST_ADAPTIVE_DEMUX_CAST (demux)));
+  return clock_drift;
+}
+
+static void
+gst_dash_demux_clock_drift_free (GstDashDemux2ClockDrift * clock_drift)
+{
+  if (clock_drift) {
+    g_mutex_lock (&clock_drift->clock_lock);
+    if (clock_drift->ntp_clock)
+      g_object_unref (clock_drift->ntp_clock);
+    g_mutex_unlock (&clock_drift->clock_lock);
+    g_mutex_clear (&clock_drift->clock_lock);
+    g_slice_free (GstDashDemux2ClockDrift, clock_drift);
+  }
+}
+
+/*
+ * The value attribute of the UTCTiming element contains a white-space
+ * separated list of servers that are recommended to be used in
+ * combination with the NTP protocol as defined in IETF RFC 5905 for
+ * getting the appropriate time.
+ *
+ * The DASH standard does not specify which version of NTP. This
+ * function only works with NTPv4 servers.
+*/
+static GstDateTime *
+gst_dash_demux_poll_ntp_server (GstDashDemux2ClockDrift * clock_drift,
+    gchar ** urls)
+{
+  GstClockTime ntp_clock_time;
+  GDateTime *dt, *dt2;
+
+  if (!clock_drift->ntp_clock) {
+    GResolver *resolver;
+    GList *inet_addrs;
+    GError *err = NULL;
+    gchar *ip_addr;
+
+    resolver = g_resolver_get_default ();
+    /* We don't round-robin NTP servers. If the manifest specifies multiple
+       NTP time servers, select one at random */
+    clock_drift->selected_url = g_random_int_range (0, g_strv_length (urls));
+
+    GST_DEBUG ("Connecting to NTP time server %s",
+        urls[clock_drift->selected_url]);
+    inet_addrs = g_resolver_lookup_by_name (resolver,
+        urls[clock_drift->selected_url], NULL, &err);
+    g_object_unref (resolver);
+    if (!inet_addrs || g_list_length (inet_addrs) == 0) {
+      GST_ERROR ("Failed to resolve hostname of NTP server: %s",
+          err ? (err->message) : "unknown error");
+      if (inet_addrs)
+        g_resolver_free_addresses (inet_addrs);
+      if (err)
+        g_error_free (err);
+      return NULL;
+    }
+    ip_addr =
+        g_inet_address_to_string ((GInetAddress
+            *) (g_list_first (inet_addrs)->data));
+    clock_drift->ntp_clock = gst_ntp_clock_new ("dashntp", ip_addr, 123, 0);
+    g_free (ip_addr);
+    g_resolver_free_addresses (inet_addrs);
+    if (!clock_drift->ntp_clock) {
+      GST_ERROR ("Failed to create NTP clock");
+      return NULL;
+    }
+    /* FIXME: Don't block and wait, trigger an update when the clock syncs up,
+     * or just wait and check later */
+    if (!gst_clock_wait_for_sync (clock_drift->ntp_clock, 5 * GST_SECOND)) {
+      g_object_unref (clock_drift->ntp_clock);
+      clock_drift->ntp_clock = NULL;
+      GST_ERROR ("Failed to lock to NTP clock");
+      return NULL;
+    }
+  }
+  ntp_clock_time = gst_clock_get_time (clock_drift->ntp_clock);
+  if (ntp_clock_time == GST_CLOCK_TIME_NONE) {
+    GST_ERROR ("Failed to get time from NTP clock");
+    return NULL;
+  }
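+  /* NTP time counts from 1900; shift it to the 1970 Unix epoch */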
+  ntp_clock_time -= NTP_TO_UNIX_EPOCH * GST_SECOND;
+  dt = g_date_time_new_from_unix_utc (ntp_clock_time / GST_SECOND);
+  if (!dt) {
+    GST_ERROR ("Failed to create GstDateTime");
+    return NULL;
+  }
+  ntp_clock_time =
+      gst_util_uint64_scale (ntp_clock_time % GST_SECOND, 1000000, GST_SECOND);
+  dt2 = g_date_time_add (dt, ntp_clock_time);
+  g_date_time_unref (dt);
+  return gst_date_time_new_from_g_date_time (dt2);
+}
+
+static GstDateTime *
+gst_dash_demux_parse_http_head (GstDashDemux2ClockDrift * clock_drift,
+    DownloadRequest * download)
+{
+  const GstStructure *response_headers;
+  const gchar *http_date;
+  const GValue *val;
+
+  g_return_val_if_fail (download != NULL, NULL);
+  g_return_val_if_fail (download->headers != NULL, NULL);
+
+  val = gst_structure_get_value (download->headers, "response-headers");
+  if (!val) {
+    return NULL;
+  }
+
+  response_headers = gst_value_get_structure (val);
+  http_date = gst_structure_get_string (response_headers, "Date");
+  if (!http_date) {
+    return NULL;
+  }
+
+  return gst_adaptive_demux_util_parse_http_head_date (http_date);
+}
+
+/*
+   The timing information is contained in the message body of the HTTP
+   response and contains a time value formatted according to NTP timestamp
+   format in IETF RFC 5905.
+
+       0                   1                   2                   3
+       0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+      +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+      |                            Seconds                            |
+      +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+      |                            Fraction                           |
+      +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+                             NTP Timestamp Format
+*/
+static GstDateTime *
+gst_dash_demux_parse_http_ntp (GstDashDemux2ClockDrift * clock_drift,
+    GstBuffer * buffer)
+{
+  gint64 seconds;
+  guint64 fraction;
+  GDateTime *dt, *dt2;
+  GstMapInfo mapinfo;
+
+  /* See https://tools.ietf.org/html/rfc5905#page-12 for details of
+     the NTP Timestamp Format */
+  gst_buffer_map (buffer, &mapinfo, GST_MAP_READ);
+  if (mapinfo.size != 8) {
+    gst_buffer_unmap (buffer, &mapinfo);
+    return NULL;
+  }
+  seconds = GST_READ_UINT32_BE (mapinfo.data);
+  fraction = GST_READ_UINT32_BE (mapinfo.data + 4);
+  gst_buffer_unmap (buffer, &mapinfo);
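+  /* Convert the 32-bit fraction of a second to microseconds */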
+  fraction = gst_util_uint64_scale (fraction, 1000000,
+      G_GUINT64_CONSTANT (1) << 32);
+  /* subtract constant to convert from 1900 based time to 1970 based time */
+  seconds -= NTP_TO_UNIX_EPOCH;
+  dt = g_date_time_new_from_unix_utc (seconds);
+  dt2 = g_date_time_add (dt, fraction);
+  g_date_time_unref (dt);
+  return gst_date_time_new_from_g_date_time (dt2);
+}
+
+/*
+  The timing information is contained in the message body of the
+  HTTP response and contains a time value formatted according to
+  xs:dateTime as defined in W3C XML Schema Part 2: Datatypes specification.
+*/
+static GstDateTime *
+gst_dash_demux_parse_http_xsdate (GstDashDemux2ClockDrift * clock_drift,
+    GstBuffer * buffer)
+{
+  GstDateTime *value = NULL;
+  GstMapInfo mapinfo;
+
+  /* the string from the server might not be zero terminated */
+  if (gst_buffer_map (buffer, &mapinfo, GST_MAP_READ)) {
+    gchar *str;
+    str = g_strndup ((const gchar *) mapinfo.data, mapinfo.size);
+    gst_buffer_unmap (buffer, &mapinfo);
+    value = gst_date_time_new_from_iso8601_string (str);
+    g_free (str);
+  }
+  return value;
+}
+
+static void
+handle_poll_clock_download_failure (DownloadRequest * request,
+    DownloadRequestState state, GstDashDemux2 * demux)
+{
+  GstAdaptiveDemux *ademux = GST_ADAPTIVE_DEMUX_CAST (demux);
+  GstDashDemux2ClockDrift *clock_drift = demux->clock_drift;
+  gint64 now =
+      GST_TIME_AS_USECONDS (gst_adaptive_demux2_get_monotonic_time (ademux));
+
+  GST_ERROR_OBJECT (demux, "Failed to receive DateTime from server");
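+  /* Retry on the fast update interval after a failed time fetch */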
+  clock_drift->next_update = now + FAST_CLOCK_UPDATE_INTERVAL;
+}
+
+static void
+handle_poll_clock_download_complete (DownloadRequest * request,
+    DownloadRequestState state, GstDashDemux2 * demux)
+{
+  GstAdaptiveDemux *ademux = GST_ADAPTIVE_DEMUX_CAST (demux);
+  GstDashDemux2ClockDrift *clock_drift = demux->clock_drift;
+
+  GDateTime *now_utc = gst_adaptive_demux2_get_client_now_utc (ademux);
+  gint64 now_us =
+      GST_TIME_AS_USECONDS (gst_adaptive_demux2_get_monotonic_time (ademux));
+  GstClockTimeDiff download_duration;
+  GTimeSpan download_offset;
+  GDateTime *client_now, *server_now;
+  GstDateTime *value = NULL;
+  GstBuffer *buffer = NULL;
+
+  if (request->headers)
+    value = gst_dash_demux_parse_http_head (clock_drift, request);
+
+  if (value == NULL) {
+    buffer = download_request_take_buffer (request);
+
+    if (clock_drift->method == GST_MPD_UTCTIMING_TYPE_HTTP_NTP) {
+      value = gst_dash_demux_parse_http_ntp (clock_drift, buffer);
+    } else {
+      /* GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE or GST_MPD_UTCTIMING_TYPE_HTTP_ISO */
+      value = gst_dash_demux_parse_http_xsdate (clock_drift, buffer);
+    }
+  }
+
+  if (buffer)
+    gst_buffer_unref (buffer);
+
+  if (!value)
+    goto fail;
+
+  server_now = gst_date_time_to_g_date_time (value);
+  gst_date_time_unref (value);
+
+  /* If gst_date_time_new_from_iso8601_string is given an unsupported
+     ISO 8601 format, it can return a GstDateTime that is not valid,
+     which causes gst_date_time_to_g_date_time to return NULL */
+  if (!server_now)
+    goto fail;
+
+  /* We don't know when the server sampled its clock, but a reasonable
+   * estimate is midway between the download request and the result */
+  download_duration =
+      GST_CLOCK_DIFF (request->download_start_time, request->download_end_time);
+  download_offset =
+      G_TIME_SPAN_MILLISECOND * GST_TIME_AS_MSECONDS (-download_duration / 2);
+
+  client_now = g_date_time_add (now_utc, download_offset);
+
+  g_mutex_lock (&clock_drift->clock_lock);
+  clock_drift->clock_compensation =
+      g_date_time_difference (server_now, client_now);
+  g_mutex_unlock (&clock_drift->clock_lock);
+
+  GST_DEBUG_OBJECT (demux,
+      "Difference between client and server clocks is %lfs",
+      ((double) clock_drift->clock_compensation) / 1000000.0);
+
+  g_date_time_unref (server_now);
+  g_date_time_unref (client_now);
+  g_date_time_unref (now_utc);
+
+  clock_drift->next_update = now_us + SLOW_CLOCK_UPDATE_INTERVAL;
+  return;
+
+fail:
+  GST_ERROR_OBJECT (demux, "Failed to parse DateTime from server");
+  clock_drift->next_update = now_us + FAST_CLOCK_UPDATE_INTERVAL;
+
+  g_date_time_unref (now_utc);
+}
+
+static void
+gst_dash_demux_poll_clock_drift (GstDashDemux2 * demux)
+{
+  GstAdaptiveDemux *ademux = GST_ADAPTIVE_DEMUX_CAST (demux);
+  GstDashDemux2ClockDrift *clock_drift;
+  GstDateTime *value = NULL;
+  gint64 now;
+  GstMPDUTCTimingType method;
+  gchar **urls;
+
+  g_return_if_fail (demux != NULL);
+  g_return_if_fail (demux->clock_drift != NULL);
+
+  clock_drift = demux->clock_drift;
+  now = GST_TIME_AS_USECONDS (gst_adaptive_demux2_get_monotonic_time (ademux));
+  if (now < clock_drift->next_update) {
+    /* TODO: If a fragment fails to download in adaptivedemux, it waits
+       for a manifest reload before another attempt to fetch a fragment.
+       Section 10.8.6 of the DVB-DASH standard states that the DASH client
+       shall refresh the manifest and resynchronise to one of the time sources.
+
+       Currently the fact that a manifest refresh follows a download failure
+       is not propagated to dashdemux. */
+    return;
+  }
+
+  urls = gst_mpd_client2_get_utc_timing_sources (demux->client,
+      SUPPORTED_CLOCK_FORMATS, &method);
+  if (!urls)
+    return;
+
+  g_mutex_lock (&clock_drift->clock_lock);
+
+  /* Update selected_url just in case the number of URLs in the UTCTiming
+     element has shrunk since the last poll */
+  clock_drift->selected_url = clock_drift->selected_url % g_strv_length (urls);
+  clock_drift->method = method;
+
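+  /* The NTP method is sampled synchronously below; the HTTP-based methods
+   * are fetched asynchronously via the download helper */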
+  if (method == GST_MPD_UTCTIMING_TYPE_NTP) {
+    GDateTime *client_now = NULL, *server_now = NULL;
+
+    value = gst_dash_demux_poll_ntp_server (clock_drift, urls);
+    if (value) {
+      server_now = gst_date_time_to_g_date_time (value);
+      gst_date_time_unref (value);
+    }
+
+    clock_drift->next_update = now + FAST_CLOCK_UPDATE_INTERVAL;
+
+    if (server_now == NULL) {
+      GST_ERROR_OBJECT (demux, "Failed to fetch time from NTP server %s",
+          urls[clock_drift->selected_url]);
+      g_mutex_unlock (&clock_drift->clock_lock);
+      return;
+    }
+
+    client_now = gst_adaptive_demux2_get_client_now_utc (ademux);
+
+    clock_drift->clock_compensation =
+        g_date_time_difference (server_now, client_now);
+
+    g_date_time_unref (server_now);
+    g_date_time_unref (client_now);
+  }
+
+  if (!value) {
+    DownloadRequest *request;
+    DownloadFlags dl_flags =
+        DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH;
+
+    GST_DEBUG_OBJECT (demux, "Fetching current time from %s",
+        urls[clock_drift->selected_url]);
+
+    if (method == GST_MPD_UTCTIMING_TYPE_HTTP_HEAD)
+      dl_flags |= DOWNLOAD_FLAG_HEADERS_ONLY;
+
+    request = download_request_new_uri (urls[clock_drift->selected_url]);
+
+    download_request_set_callbacks (request,
+        (DownloadRequestEventCallback) handle_poll_clock_download_complete,
+        (DownloadRequestEventCallback) handle_poll_clock_download_failure,
+        NULL, NULL, demux);
+
+    if (!downloadhelper_submit_request (ademux->download_helper, NULL, dl_flags,
+            request, NULL))
+      clock_drift->next_update = now + FAST_CLOCK_UPDATE_INTERVAL;
+
+    download_request_unref (request);
+  }
+
+  /* if multiple URLs were specified, use a simple round-robin to
+     poll each server */
+  clock_drift->selected_url =
+      (1 + clock_drift->selected_url) % g_strv_length (urls);
+
+  g_mutex_unlock (&clock_drift->clock_lock);
+}
+
+static GTimeSpan
+gst_dash_demux_get_clock_compensation (GstDashDemux2 * demux)
+{
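+  /* clock_compensation is a GTimeSpan in microseconds, positive when the
+   * server clock is ahead of the client */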
+  GTimeSpan rv = 0;
+  if (demux->clock_drift) {
+    g_mutex_lock (&demux->clock_drift->clock_lock);
+    rv = demux->clock_drift->clock_compensation;
+    g_mutex_unlock (&demux->clock_drift->clock_lock);
+  }
+  GST_LOG_OBJECT (demux, "Clock drift %" GST_STIME_FORMAT,
+      GST_STIME_ARGS (rv * GST_USECOND));
+  return rv;
+}
+
+static GDateTime *
+gst_dash_demux_get_server_now_utc (GstDashDemux2 * demux)
+{
+  GDateTime *client_now;
+  GDateTime *server_now;
+
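+  /* The server "now" is the client wall clock shifted by the measured drift */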
+  client_now =
+      gst_adaptive_demux2_get_client_now_utc (GST_ADAPTIVE_DEMUX_CAST (demux));
+  server_now =
+      g_date_time_add (client_now,
+      gst_dash_demux_get_clock_compensation (demux));
+  g_date_time_unref (client_now);
+  return server_now;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstdashdemux.h
new file mode 100644 (file)
index 0000000..44307da
--- /dev/null
@@ -0,0 +1,172 @@
+/*
+ * DASH demux plugin for GStreamer
+ *
+ * gstdashdemux.h
+ *
+ * Copyright (C) 2012 Orange
+ * Authors:
+ *   David Corvoysier <david.corvoysier@orange.com>
+ *   Hamid Zakari <hamid.zakari@gmail.com>
+ *
+ * Copyright (C) 2013 Smart TV Alliance
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * Copyright (C) 2021 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_DASH_DEMUX_H__
+#define __GST_DASH_DEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstdataqueue.h>
+
+#include "gstisoff.h"
+#include "gstadaptivedemux.h"
+
+#include "gstmpdclient.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_DASH_DEMUX2 \
+        (gst_dash_demux2_get_type())
+#define GST_DASH_DEMUX(obj) \
+        (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DASH_DEMUX2,GstDashDemux2))
+#define GST_DASH_DEMUX_CLASS(klass) \
+        (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DASH_DEMUX2,GstDashDemux2Class))
+#define GST_IS_DASH_DEMUX(obj) \
+        (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DASH_DEMUX2))
+#define GST_IS_DASH_DEMUX_CLASS(klass) \
+        (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DASH_DEMUX2))
+#define GST_DASH_DEMUX_CAST(obj) \
+       ((GstDashDemux2 *)obj)
+
+#define GST_TYPE_DASH_DEMUX_STREAM \
+  (gst_dash_demux_stream_get_type())
+#define GST_DASH_DEMUX_STREAM(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DASH_DEMUX_STREAM,GstDashDemux2Stream))
+#define GST_DASH_DEMUX_STREAM_CAST(obj) ((GstDashDemux2Stream *)obj)
+
+typedef struct _GstDashDemux2Stream GstDashDemux2Stream;
+typedef GstAdaptiveDemux2StreamClass GstDashDemux2StreamClass;
+
+typedef struct _GstDashDemux2ClockDrift GstDashDemux2ClockDrift;
+typedef struct _GstDashDemux2 GstDashDemux2;
+typedef struct _GstDashDemux2Class GstDashDemux2Class;
+
+struct _GstDashDemux2Stream
+{
+  GstAdaptiveDemux2Stream parent;
+
+  gint index;
+  GstActiveStream *active_stream;
+
+  /* Track provided by this stream */
+  GstAdaptiveDemuxTrack *track;
+
+  GstMediaFragmentInfo current_fragment;
+
+  /* index parsing */
+  GstSidxParser sidx_parser;
+  GstClockTime sidx_position;
+  gint64 sidx_base_offset;
+  gboolean allow_sidx;
+  GstClockTime pending_seek_ts;
+
+  GstAdapter *adapter;
+  /* current offset of the first byte in the adapter / last byte we pushed or
+   * dropped */
+  guint64 current_offset;
+  /* index = 1, header = 2, data = 3 */
+  guint current_index_header_or_data;
+
+  /* ISOBMFF box parsing */
+  gboolean is_isobmff;
+  struct {
+    /* FourCC, start offset and size of the box currently being parsed */
+    guint32 current_fourcc;
+    guint64 current_start_offset;
+    guint64 current_size;
+  } isobmff_parser;
+
+  GstMoofBox *moof;
+  guint64 moof_offset, moof_size;
+  GArray *moof_sync_samples;
+  guint current_sync_sample;
+
+  guint64 moof_average_size;
+  guint64 keyframe_average_size;
+  guint64 keyframe_average_distance;
+  gboolean first_sync_sample_after_moof, first_sync_sample_always_after_moof;
+
+  /* Internal position value, at the keyframe/entry level */
+  GstClockTime actual_position;
+  /* Timestamp of the beginning of the current fragment */
+  GstClockTime current_fragment_timestamp;
+  GstClockTime current_fragment_duration;
+  GstClockTime current_fragment_keyframe_distance;
+
+  /* Average keyframe download time (only in trickmode-key-units) */
+  GstClockTime average_download_time;
+  /* Cached target time (only in trickmode-key-units) */
+  GstClockTime target_time;
+  /* Average skip-ahead time (only in trickmode-key-units) */
+  GstClockTime average_skip_size;
+};
+
+/**
+ * GstDashDemux2:
+ *
+ * Opaque #GstDashDemux2 data structure.
+ */
+struct _GstDashDemux2
+{
+  GstAdaptiveDemux parent;
+
+  GSList *next_periods;
+
+  GstMPDClient2 *client;         /* MPD client */
+  GMutex client_lock;
+
+  GstDashDemux2ClockDrift *clock_drift;
+
+  gboolean end_of_period;
+  gboolean end_of_manifest;
+
+  /* Properties */
+  gint max_video_width, max_video_height;
+  gint max_video_framerate_n, max_video_framerate_d;
+  gchar* default_presentation_delay; /* presentation time delay if MPD@suggestedPresentationDelay is not present */
+
+  gboolean allow_trickmode_key_units;
+};
+
+struct _GstDashDemux2Class
+{
+  GstAdaptiveDemuxClass parent_class;
+};
+
+GType gst_dash_demux2_get_type (void);
+GType gst_dash_demux_stream_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (dashdemux2);
+
+G_END_DECLS
+#endif /* __GST_DASH_DEMUX_H__ */
+
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.c
new file mode 100644 (file)
index 0000000..c045ab7
--- /dev/null
@@ -0,0 +1,273 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdadaptationsetnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDAdaptationSetNode2, gst_mpd_adaptation_set_node,
+    GST_TYPE_MPD_REPRESENTATION_BASE_NODE);
+
+enum
+{
+  PROP_MPD_ADAPTATION_SET_0,
+  PROP_MPD_ADAPTATION_SET_ID,
+  PROP_MPD_ADAPTATION_SET_CONTENT_TYPE,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_adaptation_set_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDAdaptationSetNode *self = GST_MPD_ADAPTATION_SET_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_ADAPTATION_SET_ID:
+      self->id = g_value_get_int (value);
+      break;
+    case PROP_MPD_ADAPTATION_SET_CONTENT_TYPE:
+      g_free (self->contentType);
+      self->contentType = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_adaptation_set_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDAdaptationSetNode *self = GST_MPD_ADAPTATION_SET_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_ADAPTATION_SET_ID:
+      g_value_set_int (value, self->id);
+      break;
+    case PROP_MPD_ADAPTATION_SET_CONTENT_TYPE:
+      g_value_set_string (value, self->contentType);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_adaptation_set_node_finalize (GObject * object)
+{
+  GstMPDAdaptationSetNode *self = GST_MPD_ADAPTATION_SET_NODE (object);
+
+  if (self->lang)
+    xmlFree (self->lang);
+  if (self->contentType)
+    xmlFree (self->contentType);
+  g_slice_free (GstXMLRatio, self->par);
+  g_slice_free (GstXMLConditionalUintType, self->segmentAlignment);
+  g_slice_free (GstXMLConditionalUintType, self->subsegmentAlignment);
+  g_list_free_full (self->Accessibility,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Role,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Rating,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Viewpoint,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  gst_mpd_segment_base_node_free (self->SegmentBase);
+  gst_mpd_segment_list_node_free (self->SegmentList);
+  gst_mpd_segment_template_node_free (self->SegmentTemplate);
+  g_list_free_full (self->BaseURLs, (GDestroyNotify) gst_mpd_baseurl_node_free);
+  g_list_free_full (self->Representations,
+      (GDestroyNotify) gst_mpd_representation_node_free);
+  g_list_free_full (self->ContentComponents,
+      (GDestroyNotify) gst_mpd_content_component_node_free);
+  if (self->xlink_href)
+    xmlFree (self->xlink_href);
+
+  G_OBJECT_CLASS (gst_mpd_adaptation_set_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_adaptation_set_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr adaptation_set_xml_node = NULL;
+  GstMPDAdaptationSetNode *self = GST_MPD_ADAPTATION_SET_NODE (node);
+
+  adaptation_set_xml_node = xmlNewNode (NULL, (xmlChar *) "AdaptationSet");
+
+  if (self->id)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "id", self->id);
+  if (self->group)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "group",
+        self->group);
+
+  if (self->lang)
+    gst_xml_helper_set_prop_string (adaptation_set_xml_node, "lang",
+        self->lang);
+
+  if (self->contentType)
+    gst_xml_helper_set_prop_string (adaptation_set_xml_node, "contentType",
+        self->contentType);
+
+  if (self->minBandwidth)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "minBandwidth",
+        self->minBandwidth);
+  if (self->maxBandwidth)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "maxBandwidth",
+        self->maxBandwidth);
+  if (self->minWidth)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "minWidth",
+        self->minWidth);
+  if (self->maxWidth)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "maxWidth",
+        self->maxWidth);
+  if (self->minHeight)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "minHeight",
+        self->minHeight);
+  if (self->maxHeight)
+    gst_xml_helper_set_prop_uint (adaptation_set_xml_node, "maxHeight",
+        self->maxHeight);
+
+  if (self->par)
+    gst_xml_helper_set_prop_ratio (adaptation_set_xml_node, "par", self->par);
+
+  gst_xml_helper_set_prop_cond_uint (adaptation_set_xml_node,
+      "segmentAlignment", self->segmentAlignment);
+  gst_xml_helper_set_prop_cond_uint (adaptation_set_xml_node,
+      "subsegmentAlignment", self->subsegmentAlignment);
+  gst_xml_helper_set_prop_uint (adaptation_set_xml_node,
+      "subsegmentStartsWithSAP", self->subsegmentStartsWithSAP);
+  gst_xml_helper_set_prop_boolean (adaptation_set_xml_node,
+      "bitstreamSwitching", self->bitstreamSwitching);
+
+  g_list_foreach (self->Accessibility, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+  g_list_foreach (self->Role, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+  g_list_foreach (self->Rating, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+  g_list_foreach (self->Viewpoint, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+
+  gst_mpd_node_add_child_node (GST_MPD_NODE (self->SegmentBase),
+      adaptation_set_xml_node);
+  gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+      (self->SegmentList), adaptation_set_xml_node);
+  gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+      (self->SegmentTemplate), adaptation_set_xml_node);
+
+  g_list_foreach (self->BaseURLs, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+  g_list_foreach (self->Representations,
+      gst_mpd_representation_base_node_get_list_item, adaptation_set_xml_node);
+  g_list_foreach (self->ContentComponents, gst_mpd_node_get_list_item,
+      adaptation_set_xml_node);
+
+  if (self->xlink_href)
+    gst_xml_helper_set_prop_string (adaptation_set_xml_node, "xlink_href",
+        self->xlink_href);
+  if (self->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD)
+    gst_xml_helper_set_prop_string (adaptation_set_xml_node, "actuate",
+        (gchar *) GST_MPD_XLINK_ACTUATE_ON_LOAD_STR);
+  return adaptation_set_xml_node;
+}
+
+static void
+gst_mpd_adaptation_set_node_class_init (GstMPDAdaptationSetNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_adaptation_set_node_finalize;
+  object_class->set_property = gst_mpd_adaptation_set_node_set_property;
+  object_class->get_property = gst_mpd_adaptation_set_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_adaptation_set_get_xml_node;
+
+  g_object_class_install_property (object_class, PROP_MPD_ADAPTATION_SET_ID,
+      g_param_spec_int ("id", "id",
+          "adaptation set id", 0, G_MAXINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ADAPTATION_SET_CONTENT_TYPE, g_param_spec_string ("content-type",
+          "content type", "content type of the adaptation set", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_adaptation_set_node_init (GstMPDAdaptationSetNode * self)
+{
+  self->id = 0;
+  self->group = 0;
+  self->lang = NULL;            /* LangVectorType RFC 5646 */
+  self->contentType = NULL;
+  self->par = NULL;
+  self->minBandwidth = 0;
+  self->maxBandwidth = 0;
+  self->minWidth = 0;
+  self->maxWidth = 0;
+  self->minHeight = 0;
+  self->maxHeight = 0;
+  self->segmentAlignment = NULL;
+  self->subsegmentAlignment = NULL;
+  self->subsegmentStartsWithSAP = GST_SAP_TYPE_0;
+  self->bitstreamSwitching = FALSE;
+  /* list of Accessibility DescriptorType nodes */
+  self->Accessibility = NULL;
+  /* list of Role DescriptorType nodes */
+  self->Role = NULL;
+  /* list of Rating DescriptorType nodes */
+  self->Rating = NULL;
+  /* list of Viewpoint DescriptorType nodes */
+  self->Viewpoint = NULL;
+  /* SegmentBase node */
+  self->SegmentBase = NULL;
+  /* SegmentList node */
+  self->SegmentList = NULL;
+  /* SegmentTemplate node */
+  self->SegmentTemplate = NULL;
+  /* list of BaseURL nodes */
+  self->BaseURLs = NULL;
+  /* list of Representation nodes */
+  self->Representations = NULL;
+  /* list of ContentComponent nodes */
+  self->ContentComponents = NULL;
+
+  self->xlink_href = NULL;
+  self->actuate = GST_MPD_XLINK_ACTUATE_ON_REQUEST;
+}
+
+GstMPDAdaptationSetNode *
+gst_mpd_adaptation_set_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_ADAPTATION_SET_NODE, NULL);
+}
+
+void
+gst_mpd_adaptation_set_node_free (GstMPDAdaptationSetNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdadaptationsetnode.h
new file mode 100644 (file)
index 0000000..c7c2539
--- /dev/null
@@ -0,0 +1,86 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDADAPTATIONSETNODE_H__
+#define __GSTMPDADAPTATIONSETNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdrepresentationbasenode.h"
+#include "gstmpdsegmentlistnode.h"
+#include "gstmpdsegmenttemplatenode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_ADAPTATION_SET_NODE gst_mpd_adaptation_set_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDAdaptationSetNode2, gst_mpd_adaptation_set_node, GST, MPD_ADAPTATION_SET_NODE, GstMPDRepresentationBaseNode)
+
+typedef GstMPDAdaptationSetNode2 GstMPDAdaptationSetNode;
+typedef GstMPDAdaptationSetNode2Class GstMPDAdaptationSetNodeClass;
+
+struct _GstMPDAdaptationSetNode2
+{
+  GstMPDRepresentationBaseNode parent_instance;
+  guint id;
+  guint group;
+  gchar *lang;                      /* LangVectorType RFC 5646 */
+  gchar *contentType;
+  GstXMLRatio *par;
+  guint minBandwidth;
+  guint maxBandwidth;
+  guint minWidth;
+  guint maxWidth;
+  guint minHeight;
+  guint maxHeight;
+  GstXMLConditionalUintType *segmentAlignment;
+  GstXMLConditionalUintType *subsegmentAlignment;
+  GstMPDSAPType subsegmentStartsWithSAP;
+  gboolean bitstreamSwitching;
+  /* list of Accessibility DescriptorType nodes */
+  GList *Accessibility;
+  /* list of Role DescriptorType nodes */
+  GList *Role;
+  /* list of Rating DescriptorType nodes */
+  GList *Rating;
+  /* list of Viewpoint DescriptorType nodes */
+  GList *Viewpoint;
+  /* SegmentBase node */
+  GstMPDSegmentBaseNode *SegmentBase;
+  /* SegmentList node */
+  GstMPDSegmentListNode *SegmentList;
+  /* SegmentTemplate node */
+  GstMPDSegmentTemplateNode *SegmentTemplate;
+  /* list of BaseURL nodes */
+  GList *BaseURLs;
+  /* list of Representation nodes */
+  GList *Representations;
+  /* list of ContentComponent nodes */
+  GList *ContentComponents;
+
+  gchar *xlink_href;
+  GstMPDXLinkActuate actuate;
+};
+
+GstMPDAdaptationSetNode * gst_mpd_adaptation_set_node_new (void);
+void gst_mpd_adaptation_set_node_free (GstMPDAdaptationSetNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDADAPTATIONSETNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.c
new file mode 100644 (file)
index 0000000..6e298d1
--- /dev/null
@@ -0,0 +1,166 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdbaseurlnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDBaseURLNode2, gst_mpd_baseurl_node, GST_TYPE_MPD_NODE);
+
+enum
+{
+  PROP_MPD_BASEURL_0,
+  PROP_MPD_BASEURL_URL,
+  PROP_MPD_BASEURL_SERVICE_LOCATION,
+  PROP_MPD_BASEURL_BYTE_RANGE,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_baseurl_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDBaseURLNode *self = GST_MPD_BASEURL_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_BASEURL_URL:
+      g_free (self->baseURL);
+      self->baseURL = g_value_dup_string (value);
+      break;
+    case PROP_MPD_BASEURL_SERVICE_LOCATION:
+      g_free (self->serviceLocation);
+      self->serviceLocation = g_value_dup_string (value);
+      break;
+    case PROP_MPD_BASEURL_BYTE_RANGE:
+      g_free (self->byteRange);
+      self->byteRange = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_baseurl_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDBaseURLNode *self = GST_MPD_BASEURL_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_BASEURL_URL:
+      g_value_set_string (value, self->baseURL);
+      break;
+    case PROP_MPD_BASEURL_SERVICE_LOCATION:
+      g_value_set_string (value, self->serviceLocation);
+      break;
+    case PROP_MPD_BASEURL_BYTE_RANGE:
+      g_value_set_string (value, self->byteRange);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_baseurl_node_finalize (GObject * object)
+{
+  GstMPDBaseURLNode *self = GST_MPD_BASEURL_NODE (object);
+
+  g_free (self->baseURL);
+  g_free (self->serviceLocation);
+  g_free (self->byteRange);
+
+  G_OBJECT_CLASS (gst_mpd_baseurl_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_baseurl_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr baseurl_xml_node = NULL;
+  GstMPDBaseURLNode *self = GST_MPD_BASEURL_NODE (node);
+
+  baseurl_xml_node = xmlNewNode (NULL, (xmlChar *) "BaseURL");
+
+  if (self->serviceLocation)
+    gst_xml_helper_set_prop_string (baseurl_xml_node, "serviceLocation",
+        self->serviceLocation);
+
+  if (self->byteRange)
+    gst_xml_helper_set_prop_string (baseurl_xml_node, "byteRange",
+        self->byteRange);
+
+  if (self->baseURL)
+    gst_xml_helper_set_content (baseurl_xml_node, self->baseURL);
+
+  return baseurl_xml_node;
+}
+
+static void
+gst_mpd_baseurl_node_class_init (GstMPDBaseURLNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_baseurl_node_finalize;
+  object_class->set_property = gst_mpd_baseurl_node_set_property;
+  object_class->get_property = gst_mpd_baseurl_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_baseurl_get_xml_node;
+
+  g_object_class_install_property (object_class, PROP_MPD_BASEURL_URL,
+      g_param_spec_string ("url", "base url",
+          "url of the base url", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_BASEURL_SERVICE_LOCATION,
+      g_param_spec_string ("service-location", "service location",
+          "service location", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_BASEURL_BYTE_RANGE,
+      g_param_spec_string ("byte-range", "byte range", "byte range", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+}
+
+static void
+gst_mpd_baseurl_node_init (GstMPDBaseURLNode * self)
+{
+  self->baseURL = NULL;
+  self->serviceLocation = NULL;
+  self->byteRange = NULL;
+}
+
+GstMPDBaseURLNode *
+gst_mpd_baseurl_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_BASEURL_NODE, NULL);
+}
+
+void
+gst_mpd_baseurl_node_free (GstMPDBaseURLNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdbaseurlnode.h
new file mode 100644 (file)
index 0000000..7f4642d
--- /dev/null
@@ -0,0 +1,49 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDBASEURLNODE_H__
+#define __GSTMPDBASEURLNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_BASEURL_NODE gst_mpd_baseurl_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDBaseURLNode2, gst_mpd_baseurl_node, GST, MPD_BASEURL_NODE, GstMPDNode)
+
+typedef GstMPDBaseURLNode2 GstMPDBaseURLNode;
+typedef GstMPDBaseURLNode2Class GstMPDBaseURLNodeClass;
+
+struct _GstMPDBaseURLNode2
+{
+  GstObject     parent_instance;
+  gchar *baseURL;
+  gchar *serviceLocation;
+  gchar *byteRange;
+  /* TODO: add missing fields such as weight etc. */
+};
+
+GstMPDBaseURLNode * gst_mpd_baseurl_node_new (void);
+void gst_mpd_baseurl_node_free (GstMPDBaseURLNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDBASEURLNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.c
new file mode 100644 (file)
index 0000000..16aaa34
--- /dev/null
@@ -0,0 +1,3449 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+
+#include "gstmpdclient.h"
+#include "gstmpdparser.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_dash_mpd_client_debug);
+#undef GST_CAT_DEFAULT
+#define GST_CAT_DEFAULT gst_dash_mpd_client_debug
+
+G_DEFINE_TYPE (GstMPDClient2, gst_mpd_client2, GST_TYPE_OBJECT);
+
+static GstMPDSegmentBaseNode *gst_mpd_client2_get_segment_base (GstMPDPeriodNode
+    * Period, GstMPDAdaptationSetNode * AdaptationSet,
+    GstMPDRepresentationNode * Representation);
+static GstMPDSegmentListNode *gst_mpd_client2_get_segment_list (GstMPDClient2 *
+    client, GstMPDPeriodNode * Period, GstMPDAdaptationSetNode * AdaptationSet,
+    GstMPDRepresentationNode * Representation);
+/* Segments */
+static guint gst_mpd_client2_get_segments_counts (GstMPDClient2 * client,
+    GstActiveStream * stream);
+
+static GList *gst_mpd_client2_fetch_external_periods (GstMPDClient2 * client,
+    GstMPDPeriodNode * period_node);
+static GList *gst_mpd_client2_fetch_external_adaptation_set (GstMPDClient2 *
+    client, GstMPDPeriodNode * period, GstMPDAdaptationSetNode * adapt_set);
+
+static GstMPDRepresentationNode
+    * gst_mpd_client2_get_lowest_representation (GList * Representations);
+static GstStreamPeriod *gst_mpd_client2_get_stream_period (GstMPDClient2 *
+    client);
+
+typedef GstMPDNode *(*MpdClientStringIDFilter) (GList * list, gchar * data);
+typedef GstMPDNode *(*MpdClientIDFilter) (GList * list, guint data);
+
+static GstMPDNode *
+gst_mpd_client2_get_period_with_id (GList * periods, gchar * period_id)
+{
+  GstMPDPeriodNode *period;
+  GList *list = NULL;
+
+  for (list = g_list_first (periods); list; list = g_list_next (list)) {
+    period = (GstMPDPeriodNode *) list->data;
+    if (!g_strcmp0 (period->id, period_id))
+      return GST_MPD_NODE (period);
+  }
+  return NULL;
+}
+
+static GstMPDNode *
+gst_mpd_client2_get_adaptation_set_with_id (GList * adaptation_sets, guint id)
+{
+  GstMPDAdaptationSetNode *adaptation_set;
+  GList *list = NULL;
+
+  for (list = g_list_first (adaptation_sets); list; list = g_list_next (list)) {
+    adaptation_set = (GstMPDAdaptationSetNode *) list->data;
+    if (adaptation_set->id == id)
+      return GST_MPD_NODE (adaptation_set);
+  }
+  return NULL;
+}
+
+static GstMPDNode *
+gst_mpd_client2_get_representation_with_id (GList * representations,
+    gchar * rep_id)
+{
+  GstMPDRepresentationNode *representation;
+  GList *list = NULL;
+
+  for (list = g_list_first (representations); list; list = g_list_next (list)) {
+    representation = (GstMPDRepresentationNode *) list->data;
+    if (!g_strcmp0 (representation->id, rep_id))
+      return GST_MPD_NODE (representation);
+  }
+  return NULL;
+}
+
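+/* Generate the first id built from the printf template 'tuple' that is not
+ * already used by any node in the list */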
+static gchar *
+_generate_new_string_id (GList * list, const gchar * tuple,
+    MpdClientStringIDFilter filter)
+{
+  guint i = 0;
+  gchar *id = NULL;
+  GstMPDNode *node;
+  do {
+    g_free (id);
+    id = g_strdup_printf (tuple, i);
+    node = filter (list, id);
+    i++;
+  } while (node);
+
+  return id;
+}
+
+static guint
+_generate_new_id (GList * list, MpdClientIDFilter filter)
+{
+  guint id = 0;
+  GstMPDNode *node;
+  do {
+    node = filter (list, id);
+    id++;
+  } while (node);
+
+  return id;
+}
+
+static GstMPDRepresentationNode *
+gst_mpd_client2_get_lowest_representation (GList * Representations)
+{
+  GList *list = NULL;
+  GstMPDRepresentationNode *rep = NULL;
+  GstMPDRepresentationNode *lowest = NULL;
+
+  if (Representations == NULL)
+    return NULL;
+
+  for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+    rep = (GstMPDRepresentationNode *) list->data;
+    if (rep && (!lowest || rep->bandwidth < lowest->bandwidth)) {
+      lowest = rep;
+    }
+  }
+
+  return lowest;
+}
+
+#if 0
+static GstMPDRepresentationNode *
+gst_mpdparser_get_highest_representation (GList * Representations)
+{
+  GList *list = NULL;
+
+  if (Representations == NULL)
+    return NULL;
+
+  list = g_list_last (Representations);
+
+  return list ? (GstMPDRepresentationNode *) list->data : NULL;
+}
+
+static GstMPDRepresentationNode *
+gst_mpdparser_get_representation_with_max_bandwidth (GList * Representations,
+    gint max_bandwidth)
+{
+  GList *list = NULL;
+  GstMPDRepresentationNode *representation, *best_rep = NULL;
+
+  if (Representations == NULL)
+    return NULL;
+
+  if (max_bandwidth <= 0)       /* 0 => get highest representation available */
+    return gst_mpdparser_get_highest_representation (Representations);
+
+  for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+    representation = (GstMPDRepresentationNode *) list->data;
+    if (representation && representation->bandwidth <= max_bandwidth) {
+      best_rep = representation;
+    }
+  }
+
+  return best_rep;
+}
+#endif
+
+static GstMPDSegmentListNode *
+gst_mpd_client2_fetch_external_segment_list (GstMPDClient2 * client,
+    GstMPDPeriodNode * Period,
+    GstMPDAdaptationSetNode * AdaptationSet,
+    GstMPDRepresentationNode * Representation,
+    GstMPDSegmentListNode * parent, GstMPDSegmentListNode * segment_list)
+{
+  DownloadRequest *download;
+  GstBuffer *segment_list_buffer = NULL;
+  GError *err = NULL;
+
+  GstUri *base_uri, *uri;
+  gchar *query = NULL;
+  gchar *uri_string;
+  GstMPDSegmentListNode *new_segment_list = NULL;
+
+  /* ISO/IEC 23009-1:2014 5.5.3 4)
+   * Remove nodes that resolve to nothing when resolving
+   */
+  if (strcmp (segment_list->xlink_href,
+          "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+    return NULL;
+  }
+
+  if (!client->download_helper) {
+    return NULL;
+  }
+
+  /* Build absolute URI */
+
+  /* Get base URI at the MPD level */
+  base_uri =
+      gst_uri_from_string (client->mpd_base_uri ? client->
+      mpd_base_uri : client->mpd_uri);
+
+  /* combine a BaseURL at the MPD level with the current base url */
+  base_uri =
+      gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+      &query, 0);
+
+  /* combine a BaseURL at the Period level with the current base url */
+  base_uri =
+      gst_mpd_helper_combine_urls (base_uri, Period->BaseURLs, &query, 0);
+
+  if (AdaptationSet) {
+    /* combine a BaseURL at the AdaptationSet level with the current base url */
+    base_uri =
+        gst_mpd_helper_combine_urls (base_uri, AdaptationSet->BaseURLs, &query,
+        0);
+
+    if (Representation) {
+      /* combine a BaseURL at the Representation level with the current base url */
+      base_uri =
+          gst_mpd_helper_combine_urls (base_uri, Representation->BaseURLs,
+          &query, 0);
+    }
+  }
+
+  uri = gst_uri_from_string_with_base (base_uri, segment_list->xlink_href);
+  if (query)
+    gst_uri_set_query_string (uri, query);
+  g_free (query);
+  uri_string = gst_uri_to_string (uri);
+  gst_uri_unref (base_uri);
+  gst_uri_unref (uri);
+
+  download =
+      downloadhelper_fetch_uri (client->download_helper,
+      uri_string, client->mpd_uri,
+      DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH, &err);
+  g_free (uri_string);
+
+  if (!download) {
+    GST_ERROR ("Failed to download external SegmentList node at '%s': %s",
+        segment_list->xlink_href, err->message);
+    g_clear_error (&err);
+    return NULL;
+  }
+
+  segment_list_buffer = download_request_take_buffer (download);
+  download_request_unref (download);
+
+  if (segment_list_buffer) {
+    GstMapInfo map;
+
+    gst_buffer_map (segment_list_buffer, &map, GST_MAP_READ);
+    new_segment_list =
+        gst_mpdparser_get_external_segment_list ((const gchar *) map.data,
+        map.size, parent);
+
+    gst_buffer_unmap (segment_list_buffer, &map);
+    gst_buffer_unref (segment_list_buffer);
+  }
+
+  return new_segment_list;
+}
+
+static GstMPDSegmentBaseNode *
+gst_mpd_client2_get_segment_base (GstMPDPeriodNode * Period,
+    GstMPDAdaptationSetNode * AdaptationSet,
+    GstMPDRepresentationNode * Representation)
+{
+  GstMPDSegmentBaseNode *SegmentBase = NULL;
+
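+  /* SegmentBase may appear at several levels; the most specific one wins:
+   * Representation, then AdaptationSet, then Period */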
+  if (Representation && Representation->SegmentBase) {
+    SegmentBase = Representation->SegmentBase;
+  } else if (AdaptationSet && AdaptationSet->SegmentBase) {
+    SegmentBase = AdaptationSet->SegmentBase;
+  } else if (Period && Period->SegmentBase) {
+    SegmentBase = Period->SegmentBase;
+  }
+  /* the SegmentBase element could be encoded also inside a SegmentList element */
+  if (SegmentBase == NULL) {
+    if (Representation && Representation->SegmentList
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->SegmentList)
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->
+            SegmentList)->SegmentBase) {
+      SegmentBase =
+          GST_MPD_MULT_SEGMENT_BASE_NODE (Representation->
+          SegmentList)->SegmentBase;
+    } else if (AdaptationSet && AdaptationSet->SegmentList
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->SegmentList)
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->
+            SegmentList)->SegmentBase) {
+      SegmentBase =
+          GST_MPD_MULT_SEGMENT_BASE_NODE (AdaptationSet->
+          SegmentList)->SegmentBase;
+    } else if (Period && Period->SegmentList
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)
+        && GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)->SegmentBase) {
+      SegmentBase =
+          GST_MPD_MULT_SEGMENT_BASE_NODE (Period->SegmentList)->SegmentBase;
+    }
+  }
+
+  return SegmentBase;
+}
+
+static GstMPDSegmentListNode *
+gst_mpd_client2_get_segment_list (GstMPDClient2 * client,
+    GstMPDPeriodNode * Period, GstMPDAdaptationSetNode * AdaptationSet,
+    GstMPDRepresentationNode * Representation)
+{
+  GstMPDSegmentListNode **SegmentList;
+  GstMPDSegmentListNode *ParentSegmentList = NULL;
+
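+  /* Likewise for SegmentList: prefer the Representation level, then
+   * AdaptationSet, then Period */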
+  if (Representation && Representation->SegmentList) {
+    SegmentList = &Representation->SegmentList;
+    ParentSegmentList = AdaptationSet->SegmentList;
+  } else if (AdaptationSet && AdaptationSet->SegmentList) {
+    SegmentList = &AdaptationSet->SegmentList;
+    ParentSegmentList = Period->SegmentList;
+    Representation = NULL;
+  } else {
+    Representation = NULL;
+    AdaptationSet = NULL;
+    SegmentList = &Period->SegmentList;
+  }
+
+  /* Resolve external segment list here. */
+  if (*SegmentList && (*SegmentList)->xlink_href) {
+    GstMPDSegmentListNode *new_segment_list;
+
+    /* TODO: Fall back to the parent's SegmentList if
+     * - the parent has its own SegmentList, and
+     * - fetching the SegmentList from the external XML fails
+     */
+    new_segment_list =
+        gst_mpd_client2_fetch_external_segment_list (client, Period,
+        AdaptationSet, Representation, ParentSegmentList, *SegmentList);
+
+    gst_mpd_segment_list_node_free (*SegmentList);
+    *SegmentList = new_segment_list;
+  }
+
+  return *SegmentList;
+}
+
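+/* Return the duration of one media segment of @stream, computed from the
+ * duration and timescale of the current SegmentList or SegmentTemplate.
+ * Falls back to the period duration when neither provides one (single
+ * segment case). @scale_dur, if provided, receives the duration before
+ * the division by the timescale. */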
+static GstClockTime
+gst_mpd_client2_get_segment_duration (GstMPDClient2 * client,
+    GstActiveStream * stream, guint64 * scale_dur)
+{
+  GstStreamPeriod *stream_period;
+  GstMPDMultSegmentBaseNode *base = NULL;
+  GstClockTime duration = 0;
+
+  g_return_val_if_fail (stream != NULL, GST_CLOCK_TIME_NONE);
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, GST_CLOCK_TIME_NONE);
+
+  if (stream->cur_segment_list) {
+    base = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_segment_list);
+  } else if (stream->cur_seg_template) {
+    base = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template);
+  }
+
+  if (base == NULL || base->SegmentBase == NULL) {
+    /* this may happen when we have a single segment */
+    duration = stream_period->duration;
+    if (scale_dur)
+      *scale_dur = duration;
+  } else {
+    /* base->duration is a guint, so this multiplication cannot overflow */
+    duration = base->duration * GST_SECOND;
+    if (scale_dur)
+      *scale_dur = duration;
+    duration /= base->SegmentBase->timescale;
+  }
+
+  return duration;
+}
+
+void
+gst_mpd_client2_active_streams_free (GstMPDClient2 * client)
+{
+  if (client->active_streams) {
+    g_list_foreach (client->active_streams,
+        (GFunc) gst_mpdparser_free_active_stream, NULL);
+    g_list_free (client->active_streams);
+    client->active_streams = NULL;
+  }
+}
+
+static void
+gst_mpd_client2_finalize (GObject * object)
+{
+  GstMPDClient2 *client = GST_MPD_CLIENT (object);
+
+  if (client->mpd_root_node)
+    gst_mpd_root_node_free (client->mpd_root_node);
+
+  if (client->periods) {
+    g_list_free_full (client->periods,
+        (GDestroyNotify) gst_mpdparser_free_stream_period);
+  }
+
+  gst_mpd_client2_active_streams_free (client);
+
+  g_free (client->mpd_uri);
+  client->mpd_uri = NULL;
+  g_free (client->mpd_base_uri);
+  client->mpd_base_uri = NULL;
+
+  G_OBJECT_CLASS (gst_mpd_client2_parent_class)->finalize (object);
+}
+
+static void
+gst_mpd_client2_class_init (GstMPDClient2Class * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  object_class->finalize = gst_mpd_client2_finalize;
+}
+
+static void
+gst_mpd_client2_init (GstMPDClient2 * client)
+{
+}
+
+GstMPDClient2 *
+gst_mpd_client2_new (void)
+{
+  GST_DEBUG_CATEGORY_INIT (gst_dash_mpd_client_debug, "dashmpdclient2", 0,
+      "DashMpdClient");
+  return g_object_new (GST_TYPE_MPD_CLIENT, NULL);
+}
+
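+/* Create a client pre-populated with an empty static MPD root node
+ * (default DASH namespace, isoff-main profile, 1500ms minBufferTime). */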
+GstMPDClient2 *
+gst_mpd_client2_new_static (void)
+{
+  GstMPDClient2 *client = gst_mpd_client2_new ();
+
+  client->mpd_root_node = gst_mpd_root_node_new ();
+  client->mpd_root_node->default_namespace =
+      g_strdup ("urn:mpeg:dash:schema:mpd:2011");
+  client->mpd_root_node->profiles =
+      g_strdup ("urn:mpeg:dash:profile:isoff-main:2011");
+  client->mpd_root_node->type = GST_MPD_FILE_TYPE_STATIC;
+  client->mpd_root_node->minBufferTime = 1500;
+
+  return client;
+}
+
+void
+gst_mpd_client2_free (GstMPDClient2 * client)
+{
+  if (client)
+    gst_object_unref (client);
+}
+
+gboolean
+gst_mpd_client2_parse (GstMPDClient2 * client, const gchar * data, gint size)
+{
+  gboolean ret = FALSE;
+
+  ret = gst_mpdparser_get_mpd_root_node (&client->mpd_root_node, data, size);
+
+  if (ret) {
+    gst_mpd_client2_check_profiles (client);
+    gst_mpd_client2_fetch_on_load_external_resources (client);
+  }
+
+  return ret;
+}
+
+gboolean
+gst_mpd_client2_get_xml_content (GstMPDClient2 * client, gchar ** data,
+    gint * size)
+{
+  gboolean ret = FALSE;
+
+  g_return_val_if_fail (client != NULL, ret);
+  g_return_val_if_fail (client->mpd_root_node != NULL, ret);
+
+  ret = gst_mpd_node_get_xml_buffer (GST_MPD_NODE (client->mpd_root_node),
+      data, (int *) size);
+
+  return ret;
+}
+
+GstDateTime *
+gst_mpd_client2_get_availability_start_time (GstMPDClient2 * client)
+{
+  GstDateTime *start_time;
+
+  if (client == NULL)
+    return (GstDateTime *) NULL;
+
+  start_time = client->mpd_root_node->availabilityStartTime;
+  if (start_time)
+    gst_date_time_ref (start_time);
+  return start_time;
+}
+
+void
+gst_mpd_client2_set_download_helper (GstMPDClient2 * client,
+    DownloadHelper * dh)
+{
+  client->download_helper = dh;
+}
+
+void
+gst_mpd_client2_check_profiles (GstMPDClient2 * client)
+{
+  GST_DEBUG ("Profiles: %s",
+      client->mpd_root_node->profiles ? client->mpd_root_node->
+      profiles : "<none>");
+
+  if (!client->mpd_root_node->profiles)
+    return;
+
+  if (g_strstr_len (client->mpd_root_node->profiles, -1,
+          "urn:mpeg:dash:profile:isoff-on-demand:2011")) {
+    client->profile_isoff_ondemand = TRUE;
+    GST_DEBUG ("Found ISOFF on demand profile (2011)");
+  }
+}
+
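+/* Resolve all xlink references marked with actuate="onLoad" right after
+ * parsing: external Periods and AdaptationSets are downloaded and spliced
+ * into the tree, and external SegmentLists at Period, AdaptationSet and
+ * Representation level are replaced by their resolved content. */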
+void
+gst_mpd_client2_fetch_on_load_external_resources (GstMPDClient2 * client)
+{
+  GList *l;
+
+  for (l = client->mpd_root_node->Periods; l; /* explicitly advanced below */ ) {
+    GstMPDPeriodNode *period = l->data;
+    GList *m;
+
+    if (period->xlink_href && period->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+      GList *new_periods, *prev, *next;
+
+      new_periods = gst_mpd_client2_fetch_external_periods (client, period);
+
+      prev = l->prev;
+      client->mpd_root_node->Periods =
+          g_list_delete_link (client->mpd_root_node->Periods, l);
+      gst_mpd_period_node_free (period);
+      period = NULL;
+
+      /* Get new next node, we will insert before this */
+      if (prev)
+        next = prev->next;
+      else
+        next = client->mpd_root_node->Periods;
+
+      while (new_periods) {
+        client->mpd_root_node->Periods =
+            g_list_insert_before (client->mpd_root_node->Periods, next,
+            new_periods->data);
+        new_periods = g_list_delete_link (new_periods, new_periods);
+      }
+      next = NULL;
+
+      /* Update our iterator to the first new period if any, or the next */
+      if (prev)
+        l = prev->next;
+      else
+        l = client->mpd_root_node->Periods;
+
+      continue;
+    }
+
+    if (period->SegmentList && period->SegmentList->xlink_href
+        && period->SegmentList->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+      GstMPDSegmentListNode *new_segment_list;
+
+      new_segment_list =
+          gst_mpd_client2_fetch_external_segment_list (client, period, NULL,
+          NULL, NULL, period->SegmentList);
+
+      gst_mpd_segment_list_node_free (period->SegmentList);
+      period->SegmentList = new_segment_list;
+    }
+
+    for (m = period->AdaptationSets; m; /* explicitly advanced below */ ) {
+      GstMPDAdaptationSetNode *adapt_set = m->data;
+      GList *n;
+
+      if (adapt_set->xlink_href
+          && adapt_set->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+        GList *new_adapt_sets, *prev, *next;
+
+        new_adapt_sets =
+            gst_mpd_client2_fetch_external_adaptation_set (client, period,
+            adapt_set);
+
+        prev = m->prev;
+        period->AdaptationSets = g_list_delete_link (period->AdaptationSets, m);
+        gst_mpd_adaptation_set_node_free (adapt_set);
+        adapt_set = NULL;
+
+        /* Get new next node, we will insert before this */
+        if (prev)
+          next = prev->next;
+        else
+          next = period->AdaptationSets;
+
+        while (new_adapt_sets) {
+          period->AdaptationSets =
+              g_list_insert_before (period->AdaptationSets, next,
+              new_adapt_sets->data);
+          new_adapt_sets = g_list_delete_link (new_adapt_sets, new_adapt_sets);
+        }
+        next = NULL;
+
+        /* Update our iterator to the first new adapt_set if any, or the next */
+        if (prev)
+          m = prev->next;
+        else
+          m = period->AdaptationSets;
+
+        continue;
+      }
+
+      if (adapt_set->SegmentList && adapt_set->SegmentList->xlink_href
+          && adapt_set->SegmentList->actuate == GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+        GstMPDSegmentListNode *new_segment_list;
+
+        new_segment_list =
+            gst_mpd_client2_fetch_external_segment_list (client, period,
+            adapt_set, NULL, period->SegmentList, adapt_set->SegmentList);
+
+        gst_mpd_segment_list_node_free (adapt_set->SegmentList);
+        adapt_set->SegmentList = new_segment_list;
+      }
+
+      for (n = adapt_set->Representations; n; n = n->next) {
+        GstMPDRepresentationNode *representation = n->data;
+
+        if (representation->SegmentList
+            && representation->SegmentList->xlink_href
+            && representation->SegmentList->actuate ==
+            GST_MPD_XLINK_ACTUATE_ON_LOAD) {
+
+          GstMPDSegmentListNode *new_segment_list;
+
+          new_segment_list =
+              gst_mpd_client2_fetch_external_segment_list (client, period,
+              adapt_set, representation, adapt_set->SegmentList,
+              representation->SegmentList);
+
+          gst_mpd_segment_list_node_free (representation->SegmentList);
+          representation->SegmentList = new_segment_list;
+
+        }
+      }
+
+      m = m->next;
+    }
+
+    l = l->next;
+  }
+}
+
+static GstStreamPeriod *
+gst_mpd_client2_get_stream_period (GstMPDClient2 * client)
+{
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->periods != NULL, NULL);
+
+  return g_list_nth_data (client->periods, client->period_idx);
+}
+
+const gchar *
+gst_mpd_client2_get_baseURL (GstMPDClient2 * client, guint indexStream)
+{
+  GstActiveStream *stream;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->active_streams != NULL, NULL);
+  stream = g_list_nth_data (client->active_streams, indexStream);
+  g_return_val_if_fail (stream != NULL, NULL);
+
+  return stream->baseURL;
+}
+
+/* select a stream and extract the baseURL (if present) */
+gchar *
+gst_mpd_client2_parse_baseURL (GstMPDClient2 * client, GstActiveStream * stream,
+    gchar ** query)
+{
+  GstStreamPeriod *stream_period;
+  static const gchar empty[] = "";
+  gchar *ret = NULL;
+  GstUri *abs_url;
+
+  g_return_val_if_fail (stream != NULL, g_strdup (empty));
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, g_strdup (empty));
+  g_return_val_if_fail (stream_period->period != NULL, g_strdup (empty));
+
+  /* NULLify query return before we start */
+  if (query)
+    *query = NULL;
+
+  /* initialise base url */
+  abs_url =
+      gst_uri_from_string (client->mpd_base_uri ? client->
+      mpd_base_uri : client->mpd_uri);
+
+  /* combine a BaseURL at the MPD level with the current base url */
+  abs_url =
+      gst_mpd_helper_combine_urls (abs_url, client->mpd_root_node->BaseURLs,
+      query, stream->baseURL_idx);
+
+  /* combine a BaseURL at the Period level with the current base url */
+  abs_url =
+      gst_mpd_helper_combine_urls (abs_url, stream_period->period->BaseURLs,
+      query, stream->baseURL_idx);
+
+  GST_DEBUG ("Current adaptation set id %i (%s)", stream->cur_adapt_set->id,
+      stream->cur_adapt_set->contentType);
+  /* combine a BaseURL at the AdaptationSet level with the current base url */
+  abs_url =
+      gst_mpd_helper_combine_urls (abs_url, stream->cur_adapt_set->BaseURLs,
+      query, stream->baseURL_idx);
+
+  /* combine a BaseURL at the Representation level with the current base url */
+  abs_url =
+      gst_mpd_helper_combine_urls (abs_url,
+      stream->cur_representation->BaseURLs, query, stream->baseURL_idx);
+
+  ret = gst_uri_to_string (abs_url);
+  gst_uri_unref (abs_url);
+
+  return ret;
+}
+
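+/* Compute the end time of @segment: from its repeat count when it is known
+ * (>= 0), otherwise from the start of the next segment, or from the end of
+ * the period for the last segment. */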
+static GstClockTime
+gst_mpd_client2_get_segment_end_time (GstMPDClient2 * client,
+    GPtrArray * segments, const GstMediaSegment * segment, gint index)
+{
+  const GstStreamPeriod *stream_period;
+  GstClockTime end;
+
+  if (segment->repeat >= 0)
+    return segment->start + (segment->repeat + 1) * segment->duration;
+
+  if (index < segments->len - 1) {
+    const GstMediaSegment *next_segment =
+        g_ptr_array_index (segments, index + 1);
+    end = next_segment->start;
+  } else {
+    stream_period = gst_mpd_client2_get_stream_period (client);
+    end = stream_period->start + stream_period->duration;
+  }
+  return end;
+}
+
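+/* Append a new GstMediaSegment to the stream's segment array. Start and
+ * duration are given both in timescale units (scale_*) and as GstClockTime. */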
+static gboolean
+gst_mpd_client2_add_media_segment (GstActiveStream * stream,
+    GstMPDSegmentURLNode * url_node, guint number, gint repeat,
+    guint64 scale_start, guint64 scale_duration,
+    GstClockTime start, GstClockTime duration)
+{
+  GstMediaSegment *media_segment;
+
+  g_return_val_if_fail (stream->segments != NULL, FALSE);
+
+  media_segment = g_slice_new0 (GstMediaSegment);
+
+  media_segment->SegmentURL = url_node;
+  media_segment->number = number;
+  media_segment->scale_start = scale_start;
+  media_segment->scale_duration = scale_duration;
+  media_segment->start = start;
+  media_segment->duration = duration;
+  media_segment->repeat = repeat;
+
+  g_ptr_array_add (stream->segments, media_segment);
+  GST_LOG ("Added new segment: number %d, repeat %d, "
+      "ts: %" GST_TIME_FORMAT ", dur: %"
+      GST_TIME_FORMAT, number, repeat,
+      GST_TIME_ARGS (start), GST_TIME_ARGS (duration));
+
+  return TRUE;
+}
+
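+/* Update the stream's presentationTimeOffset from the SegmentBase of the
+ * current SegmentList, SegmentTemplate or SegmentBase node, converted from
+ * timescale units to GstClockTime. */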
+static void
+gst_mpd_client2_stream_update_presentation_time_offset (GstMPDClient2 * client,
+    GstActiveStream * stream)
+{
+  GstMPDSegmentBaseNode *segbase = NULL;
+
+  /* Find the used segbase */
+  if (stream->cur_segment_list) {
+    segbase =
+        GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_segment_list)->SegmentBase;
+  } else if (stream->cur_seg_template) {
+    segbase =
+        GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template)->SegmentBase;
+  } else if (stream->cur_segment_base) {
+    segbase = stream->cur_segment_base;
+  }
+
+  if (segbase) {
+    /* Avoid overflows */
+    stream->presentationTimeOffset =
+        gst_util_uint64_scale (segbase->presentationTimeOffset, GST_SECOND,
+        segbase->timescale);
+  } else {
+    stream->presentationTimeOffset = 0;
+  }
+
+  GST_LOG ("Setting stream's presentation time offset to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (stream->presentationTimeOffset));
+}
+
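+/* Build the media segment list for @representation within the current
+ * period, using SegmentBase/SegmentList/SegmentTemplate (with or without a
+ * SegmentTimeline), clip the segments to the period boundaries and update
+ * the stream's base URL, query string and presentation time offset. */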
+gboolean
+gst_mpd_client2_setup_representation (GstMPDClient2 * client,
+    GstActiveStream * stream, GstMPDRepresentationNode * representation)
+{
+  GstStreamPeriod *stream_period;
+  GList *rep_list;
+  GstClockTime PeriodStart, PeriodEnd, start_time, duration;
+  guint i;
+  guint64 start;
+
+  if (stream->cur_adapt_set == NULL) {
+    GST_WARNING ("No valid AdaptationSet node in the MPD file, aborting...");
+    return FALSE;
+  }
+
+  rep_list = stream->cur_adapt_set->Representations;
+  stream->cur_representation = representation;
+  stream->representation_idx = g_list_index (rep_list, representation);
+
+  /* clean the old segment list, if any */
+  if (stream->segments) {
+    g_ptr_array_unref (stream->segments);
+    stream->segments = NULL;
+  }
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, FALSE);
+  g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+  PeriodStart = stream_period->start;
+  if (GST_CLOCK_TIME_IS_VALID (stream_period->duration))
+    PeriodEnd = stream_period->start + stream_period->duration;
+  else
+    PeriodEnd = GST_CLOCK_TIME_NONE;
+
+  GST_LOG ("Building segment list for Period from %" GST_TIME_FORMAT " to %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (PeriodStart), GST_TIME_ARGS (PeriodEnd));
+
+  if (representation->SegmentBase != NULL
+      || representation->SegmentList != NULL) {
+    GList *SegmentURL;
+
+    /* In either of these cases we have a fixed list of segments,
+     * so initialise the segment list */
+    gst_mpdparser_init_active_stream_segments (stream);
+
+    /* get the first segment_base of the selected representation */
+    if ((stream->cur_segment_base =
+            gst_mpd_client2_get_segment_base (stream_period->period,
+                stream->cur_adapt_set, representation)) == NULL) {
+      GST_DEBUG ("No useful SegmentBase node for the current Representation");
+    }
+
+    /* get the first segment_list of the selected representation */
+    if ((stream->cur_segment_list =
+            gst_mpd_client2_get_segment_list (client, stream_period->period,
+                stream->cur_adapt_set, representation)) == NULL) {
+      GST_DEBUG ("No useful SegmentList node for the current Representation");
+      /* here we should have a single segment for each representation, whose URL is encoded in the baseURL element */
+      if (!gst_mpd_client2_add_media_segment (stream, NULL, 1, 0, 0,
+              PeriodEnd - PeriodStart, PeriodStart, PeriodEnd - PeriodStart)) {
+        return FALSE;
+      }
+    } else {
+      /* build the list of GstMediaSegment nodes from the SegmentList node */
+      SegmentURL = stream->cur_segment_list->SegmentURL;
+      if (SegmentURL == NULL) {
+        GST_WARNING
+            ("No valid list of SegmentURL nodes in the MPD file, aborting...");
+        return FALSE;
+      }
+
+      /* build segment list */
+      i = GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+          cur_segment_list)->startNumber;
+      start = 0;
+      start_time = PeriodStart;
+
+      GST_LOG ("Building media segment list using a SegmentList node");
+      if (GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+              cur_segment_list)->SegmentTimeline) {
+        GstMPDSegmentTimelineNode *timeline;
+        GstMPDSNode *S;
+        GList *list;
+        GstClockTime presentationTimeOffset;
+        GstMPDSegmentBaseNode *segbase;
+
+        segbase =
+            GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+            cur_segment_list)->SegmentBase;
+        presentationTimeOffset =
+            gst_util_uint64_scale (segbase->presentationTimeOffset, GST_SECOND,
+            segbase->timescale);
+        GST_LOG ("presentationTimeOffset = %" G_GUINT64_FORMAT,
+            presentationTimeOffset);
+
+        timeline =
+            GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+            cur_segment_list)->SegmentTimeline;
+        for (list = g_queue_peek_head_link (&timeline->S); list;
+            list = g_list_next (list)) {
+          guint timescale;
+
+          S = (GstMPDSNode *) list->data;
+          GST_LOG ("Processing S node: d=%" G_GUINT64_FORMAT " r=%d t=%"
+              G_GUINT64_FORMAT, S->d, S->r, S->t);
+          timescale =
+              GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+              cur_segment_list)->SegmentBase->timescale;
+          duration = gst_util_uint64_scale (S->d, GST_SECOND, timescale);
+
+          if (S->t > 0) {
+            start = S->t;
+            start_time = gst_util_uint64_scale (S->t, GST_SECOND, timescale)
+                + PeriodStart - presentationTimeOffset;
+          }
+
+          if (!SegmentURL) {
+            GST_WARNING
+                ("SegmentTimeline does not have a matching SegmentURL, aborting...");
+            return FALSE;
+          }
+
+          if (!gst_mpd_client2_add_media_segment (stream, SegmentURL->data, i,
+                  S->r, start, S->d, start_time, duration)) {
+            return FALSE;
+          }
+          i += S->r + 1;
+          start_time += duration * (S->r + 1);
+          start += S->d * (S->r + 1);
+          SegmentURL = g_list_next (SegmentURL);
+        }
+      } else {
+        guint64 scale_dur;
+
+        duration =
+            gst_mpd_client2_get_segment_duration (client, stream, &scale_dur);
+        if (!GST_CLOCK_TIME_IS_VALID (duration))
+          return FALSE;
+
+        while (SegmentURL) {
+          if (!gst_mpd_client2_add_media_segment (stream, SegmentURL->data, i,
+                  0, start, scale_dur, start_time, duration)) {
+            return FALSE;
+          }
+          i++;
+          start += scale_dur;
+          start_time += duration;
+          SegmentURL = g_list_next (SegmentURL);
+        }
+      }
+    }
+  } else {
+    if (representation->SegmentTemplate != NULL) {
+      stream->cur_seg_template = representation->SegmentTemplate;
+    } else if (stream->cur_adapt_set->SegmentTemplate != NULL) {
+      stream->cur_seg_template = stream->cur_adapt_set->SegmentTemplate;
+    } else if (stream_period->period->SegmentTemplate != NULL) {
+      stream->cur_seg_template = stream_period->period->SegmentTemplate;
+    }
+
+    if (stream->cur_seg_template == NULL) {
+
+      gst_mpdparser_init_active_stream_segments (stream);
+      /* here we should have a single segment for each representation, whose URL is encoded in the baseURL element */
+      if (!gst_mpd_client2_add_media_segment (stream, NULL, 1, 0, 0,
+              PeriodEnd - PeriodStart, 0, PeriodEnd - PeriodStart)) {
+        return FALSE;
+      }
+    } else {
+      GstClockTime presentationTimeOffset;
+      GstMPDMultSegmentBaseNode *mult_seg =
+          GST_MPD_MULT_SEGMENT_BASE_NODE (stream->cur_seg_template);
+      presentationTimeOffset =
+          gst_util_uint64_scale (mult_seg->SegmentBase->presentationTimeOffset,
+          GST_SECOND, mult_seg->SegmentBase->timescale);
+      GST_LOG ("presentationTimeOffset = %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (presentationTimeOffset));
+      /* build segment list */
+      i = mult_seg->startNumber;
+      start = 0;
+      start_time = 0;
+
+      GST_LOG ("Building media segment list using this template: %s",
+          stream->cur_seg_template->media);
+
+      if (mult_seg->SegmentTimeline) {
+        GstMPDSegmentTimelineNode *timeline;
+        GstMPDSNode *S;
+        GList *list;
+
+        timeline = mult_seg->SegmentTimeline;
+        gst_mpdparser_init_active_stream_segments (stream);
+        for (list = g_queue_peek_head_link (&timeline->S); list;
+            list = g_list_next (list)) {
+          guint timescale;
+
+          S = (GstMPDSNode *) list->data;
+          GST_LOG ("Processing S node: d=%" G_GUINT64_FORMAT " r=%u t=%"
+              G_GUINT64_FORMAT, S->d, S->r, S->t);
+          timescale = mult_seg->SegmentBase->timescale;
+          duration = gst_util_uint64_scale (S->d, GST_SECOND, timescale);
+          if (S->t > 0) {
+            start = S->t;
+            start_time = gst_util_uint64_scale (S->t, GST_SECOND, timescale)
+                + PeriodStart - presentationTimeOffset;
+          }
+
+          if (!gst_mpd_client2_add_media_segment (stream, NULL, i, S->r, start,
+                  S->d, start_time, duration)) {
+            return FALSE;
+          }
+          i += S->r + 1;
+          start += S->d * (S->r + 1);
+          start_time += duration * (S->r + 1);
+        }
+      } else {
+        /* NOP - The segment is created on demand with the template, no need
+         * to build a list */
+      }
+    }
+  }
+
+  /* clip duration of segments to stop at period end */
+  if (stream->segments && stream->segments->len) {
+    if (GST_CLOCK_TIME_IS_VALID (PeriodEnd)) {
+      guint n;
+
+      for (n = 0; n < stream->segments->len; ++n) {
+        GstMediaSegment *media_segment =
+            g_ptr_array_index (stream->segments, n);
+        if (media_segment) {
+          if (media_segment->start + media_segment->duration > PeriodEnd) {
+            GstClockTime stop = PeriodEnd;
+            if (n < stream->segments->len - 1) {
+              GstMediaSegment *next_segment =
+                  g_ptr_array_index (stream->segments, n + 1);
+              if (next_segment && next_segment->start < PeriodEnd)
+                stop = next_segment->start;
+            }
+            media_segment->duration =
+                media_segment->start > stop ? 0 : stop - media_segment->start;
+            GST_LOG ("Fixed duration of segment %u: %" GST_TIME_FORMAT, n,
+                GST_TIME_ARGS (media_segment->duration));
+
+            /* If the segment was clipped entirely, we discard it and all
+             * subsequent ones */
+            if (media_segment->duration == 0) {
+              GST_WARNING ("Discarding %u segments outside period",
+                  stream->segments->len - n);
+              /* _set_size should properly unref elements */
+              g_ptr_array_set_size (stream->segments, n);
+              break;
+            }
+          }
+        }
+      }
+    }
+#ifndef GST_DISABLE_GST_DEBUG
+    if (stream->segments->len > 0) {
+      GstMediaSegment *last_media_segment =
+          g_ptr_array_index (stream->segments, stream->segments->len - 1);
+      GST_LOG ("Built a list of %d segments", last_media_segment->number);
+    } else {
+      GST_LOG ("All media segments were clipped");
+    }
+#endif
+  }
+
+  g_free (stream->baseURL);
+  g_free (stream->queryURL);
+  stream->baseURL =
+      gst_mpd_client2_parse_baseURL (client, stream, &stream->queryURL);
+
+  gst_mpd_client2_stream_update_presentation_time_offset (client, stream);
+
+  return TRUE;
+}
+
+#define CUSTOM_WRAPPER_START "<custom_wrapper>"
+#define CUSTOM_WRAPPER_END "</custom_wrapper>"
+
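+/* Download and parse a Period referenced through xlink:href. The external
+ * XML may contain several Period elements without a common root, so it is
+ * wrapped in a custom root element before parsing. Returns the list of new
+ * GstMPDPeriodNodes, or NULL on failure or for resolve-to-zero references. */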
+static GList *
+gst_mpd_client2_fetch_external_periods (GstMPDClient2 * client,
+    GstMPDPeriodNode * period_node)
+{
+  DownloadRequest *download;
+  GstBuffer *period_buffer;
+  GError *err = NULL;
+
+  GstUri *base_uri, *uri;
+  gchar *query = NULL;
+  gchar *uri_string, *wrapper;
+  GList *new_periods = NULL;
+  const gchar *data;
+
+  /* ISO/IEC 23009-1:2014 5.5.3 4)
+   * Remove nodes that resolve to nothing when resolving
+   */
+  if (strcmp (period_node->xlink_href,
+          "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+    return NULL;
+  }
+
+  if (!client->download_helper) {
+    return NULL;
+  }
+
+  /* Build absolute URI */
+
+  /* Get base URI at the MPD level */
+  base_uri =
+      gst_uri_from_string (client->mpd_base_uri ? client->
+      mpd_base_uri : client->mpd_uri);
+
+  /* combine a BaseURL at the MPD level with the current base url */
+  base_uri =
+      gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+      &query, 0);
+  uri = gst_uri_from_string_with_base (base_uri, period_node->xlink_href);
+  if (query)
+    gst_uri_set_query_string (uri, query);
+  g_free (query);
+  uri_string = gst_uri_to_string (uri);
+  gst_uri_unref (base_uri);
+  gst_uri_unref (uri);
+
+  download =
+      downloadhelper_fetch_uri (client->download_helper,
+      uri_string, client->mpd_uri,
+      DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH, &err);
+  g_free (uri_string);
+
+  if (!download) {
+    GST_ERROR ("Failed to download external Period node at '%s': %s",
+        period_node->xlink_href, err->message);
+    g_clear_error (&err);
+    return NULL;
+  }
+
+  period_buffer = download_request_take_buffer (download);
+  download_request_unref (download);
+
+  if (period_buffer) {
+    GstAdapter *adapter;
+    /* The external XML may contain multiple Period elements without a
+     * common root node. To avoid an XML parsing error, wrap the downloaded
+     * content in a custom root node before parsing. */
+    adapter = gst_adapter_new ();
+
+    wrapper = g_new (gchar, strlen (CUSTOM_WRAPPER_START));
+    memcpy (wrapper, CUSTOM_WRAPPER_START, strlen (CUSTOM_WRAPPER_START));
+    gst_adapter_push (adapter,
+        gst_buffer_new_wrapped (wrapper, strlen (CUSTOM_WRAPPER_START)));
+
+    gst_adapter_push (adapter, period_buffer);
+
+    wrapper = g_strdup (CUSTOM_WRAPPER_END);
+    gst_adapter_push (adapter,
+        gst_buffer_new_wrapped (wrapper, strlen (CUSTOM_WRAPPER_END) + 1));
+
+    data = gst_adapter_map (adapter, gst_adapter_available (adapter));
+
+    new_periods =
+        gst_mpdparser_get_external_periods (data,
+        gst_adapter_available (adapter));
+
+    gst_adapter_unmap (adapter);
+    gst_adapter_clear (adapter);
+    gst_object_unref (adapter);
+  }
+
+  return new_periods;
+}
+
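+/* Build the list of GstStreamPeriods from the MPD Periods, resolving
+ * external Periods along the way and computing each period's start time
+ * and duration. Processing stops as soon as the requested @time,
+ * @period_idx or @period_id is covered. */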
+gboolean
+gst_mpd_client2_setup_media_presentation (GstMPDClient2 * client,
+    GstClockTime time, gint period_idx, const gchar * period_id)
+{
+  GstStreamPeriod *stream_period;
+  GstClockTime start, duration;
+  GList *list, *next;
+  guint idx;
+  gboolean ret = FALSE;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  /* Check if we set up the media presentation far enough already */
+  for (list = client->periods; list; list = list->next) {
+    GstStreamPeriod *stream_period = list->data;
+
+    if ((time != GST_CLOCK_TIME_NONE
+            && stream_period->duration != GST_CLOCK_TIME_NONE
+            && stream_period->start + stream_period->duration >= time)
+        || (time != GST_CLOCK_TIME_NONE && stream_period->start >= time))
+      return TRUE;
+
+    if (period_idx != -1 && stream_period->number >= period_idx)
+      return TRUE;
+
+    if (period_id != NULL && stream_period->period->id != NULL
+        && strcmp (stream_period->period->id, period_id) == 0)
+      return TRUE;
+
+  }
+
+  GST_DEBUG ("Building the list of Periods in the Media Presentation");
+  /* clean the old period list, if any */
+  /* TODO: In theory we could reuse the ones we have so far but that
+   * seems more complicated than the overhead caused here
+   */
+  if (client->periods) {
+    g_list_foreach (client->periods,
+        (GFunc) gst_mpdparser_free_stream_period, NULL);
+    g_list_free (client->periods);
+    client->periods = NULL;
+  }
+
+  idx = 0;
+  start = 0;
+  duration = GST_CLOCK_TIME_NONE;
+
+  if (client->mpd_root_node->mediaPresentationDuration <= 0 &&
+      client->mpd_root_node->mediaPresentationDuration != -1) {
+    /* Invalid MPD file: MPD duration is negative or zero */
+    goto syntax_error;
+  }
+
+  for (list = client->mpd_root_node->Periods; list;
+      /* explicitly advanced below */ ) {
+    GstMPDPeriodNode *period_node = list->data;
+    GstMPDPeriodNode *next_period_node = NULL;
+
+    /* Download external period */
+    if (period_node->xlink_href) {
+      GList *new_periods;
+      GList *prev;
+
+      new_periods =
+          gst_mpd_client2_fetch_external_periods (client, period_node);
+
+      prev = list->prev;
+      client->mpd_root_node->Periods =
+          g_list_delete_link (client->mpd_root_node->Periods, list);
+      gst_mpd_period_node_free (period_node);
+      period_node = NULL;
+
+      /* Get new next node, we will insert before this */
+      if (prev)
+        next = prev->next;
+      else
+        next = client->mpd_root_node->Periods;
+
+      while (new_periods) {
+        client->mpd_root_node->Periods =
+            g_list_insert_before (client->mpd_root_node->Periods, next,
+            new_periods->data);
+        new_periods = g_list_delete_link (new_periods, new_periods);
+      }
+      next = NULL;
+
+      /* Update our iterator to the first new period if any, or the next */
+      if (prev)
+        list = prev->next;
+      else
+        list = client->mpd_root_node->Periods;
+
+      /* And try again */
+      continue;
+    }
+
+    if (period_node->start != -1) {
+      /* we have a regular period */
+      /* start cannot be smaller than previous start */
+      if (list != g_list_first (client->mpd_root_node->Periods)
+          && start >= period_node->start * GST_MSECOND) {
+        /* Invalid MPD file: duration would be negative or zero */
+        goto syntax_error;
+      }
+      start = period_node->start * GST_MSECOND;
+    } else if (duration != GST_CLOCK_TIME_NONE) {
+      /* start time inferred from previous period, this is still a regular period */
+      start += duration;
+    } else if (idx == 0
+        && client->mpd_root_node->type == GST_MPD_FILE_TYPE_STATIC) {
+      /* first period of a static MPD file, start time is 0 */
+      start = 0;
+    } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+      /* this should be a live stream, let this pass */
+    } else {
+      /* this is an 'Early Available Period' */
+      goto early;
+    }
+
+    /* compute duration.
+       If there is a start time for the next period, or this is the last period
+       and mediaPresentationDuration was set, those values will take precedence
+       over a configured period duration in computing this period's duration
+
+       ISO/IEC 23009-1:2014(E), chapter 5.3.2.1
+       "The Period extends until the PeriodStart of the next Period, or until
+       the end of the Media Presentation in the case of the last Period."
+     */
+
+    while ((next = g_list_next (list)) != NULL) {
+      /* try to infer this period duration from the start time of the next period */
+      next_period_node = next->data;
+
+      if (next_period_node->xlink_href) {
+        GList *new_periods;
+
+        new_periods =
+            gst_mpd_client2_fetch_external_periods (client, next_period_node);
+
+        client->mpd_root_node->Periods =
+            g_list_delete_link (client->mpd_root_node->Periods, next);
+        gst_mpd_period_node_free (next_period_node);
+        next_period_node = NULL;
+        /* Get new next node, we will insert before this */
+        next = g_list_next (list);
+        while (new_periods) {
+          client->mpd_root_node->Periods =
+              g_list_insert_before (client->mpd_root_node->Periods, next,
+              new_periods->data);
+          new_periods = g_list_delete_link (new_periods, new_periods);
+        }
+
+        /* And try again: the next list element is now the first of the
+         * newly inserted nodes, if any */
+      } else {
+        /* Got the next period and it doesn't have to be downloaded first */
+        break;
+      }
+    }
+
+    if (next_period_node) {
+      if (next_period_node->start != -1) {
+        if (start >= next_period_node->start * GST_MSECOND) {
+          /* Invalid MPD file: duration would be negative or zero */
+          goto syntax_error;
+        }
+        duration = next_period_node->start * GST_MSECOND - start;
+      } else if (period_node->duration != -1) {
+        if (period_node->duration <= 0) {
+          /* Invalid MPD file: duration would be negative or zero */
+          goto syntax_error;
+        }
+        duration = period_node->duration * GST_MSECOND;
+      } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+        /* might be a live file, ignore unspecified duration */
+      } else {
+        /* Invalid MPD file! */
+        goto syntax_error;
+      }
+    } else if (client->mpd_root_node->mediaPresentationDuration != -1) {
+      /* last Period of the Media Presentation */
+      if (client->mpd_root_node->mediaPresentationDuration * GST_MSECOND <=
+          start) {
+        /* Invalid MPD file: duration would be negative or zero */
+        goto syntax_error;
+      }
+      duration =
+          client->mpd_root_node->mediaPresentationDuration * GST_MSECOND -
+          start;
+    } else if (period_node->duration != -1) {
+      duration = period_node->duration * GST_MSECOND;
+    } else if (client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC) {
+      /* might be a live file, ignore unspecified duration */
+    } else {
+      /* Invalid MPD file! */
+      GST_ERROR
+          ("Invalid MPD file. The MPD is static without a valid duration");
+      goto syntax_error;
+    }
+
+    stream_period = g_slice_new0 (GstStreamPeriod);
+    client->periods = g_list_append (client->periods, stream_period);
+    stream_period->period = period_node;
+    stream_period->number = idx++;
+    stream_period->start = start;
+    stream_period->duration = duration;
+    ret = TRUE;
+    GST_LOG (" - added Period %d start=%" GST_TIME_FORMAT " duration=%"
+        GST_TIME_FORMAT, idx, GST_TIME_ARGS (start), GST_TIME_ARGS (duration));
+
+    if ((time != GST_CLOCK_TIME_NONE
+            && stream_period->duration != GST_CLOCK_TIME_NONE
+            && stream_period->start + stream_period->duration >= time)
+        || (time != GST_CLOCK_TIME_NONE && stream_period->start >= time))
+      break;
+
+    if (period_idx != -1 && stream_period->number >= period_idx)
+      break;
+
+    if (period_id != NULL && stream_period->period->id != NULL
+        && strcmp (stream_period->period->id, period_id) == 0)
+      break;
+
+    list = list->next;
+  }
+
+  GST_DEBUG
+      ("Found a total of %d valid Periods in the Media Presentation up to this point",
+      idx);
+  return ret;
+
+early:
+  GST_WARNING
+      ("Found an Early Available Period, skipping the rest of the Media Presentation");
+  return ret;
+
+syntax_error:
+  GST_WARNING
+      ("Cannot get the duration of the Period %d, skipping the rest of the Media Presentation",
+      idx);
+  return ret;
+}
+
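+/* Download and parse an AdaptationSet referenced through xlink:href, using
+ * the MPD and Period level BaseURLs to build the absolute URI. Returns the
+ * list of new GstMPDAdaptationSetNodes, or NULL on failure or for
+ * resolve-to-zero references. */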
+static GList *
+gst_mpd_client2_fetch_external_adaptation_set (GstMPDClient2 * client,
+    GstMPDPeriodNode * period, GstMPDAdaptationSetNode * adapt_set)
+{
+  DownloadRequest *download;
+  GstBuffer *adapt_set_buffer;
+  GError *err = NULL;
+  GstUri *base_uri, *uri;
+  gchar *query = NULL;
+  gchar *uri_string;
+  GList *new_adapt_sets = NULL;
+
+  /* ISO/IEC 23009-1:2014 5.5.3 4)
+   * Remove nodes that resolve to nothing when resolving
+   */
+  if (strcmp (adapt_set->xlink_href, "urn:mpeg:dash:resolve-to-zero:2013") == 0) {
+    return NULL;
+  }
+
+  if (!client->download_helper) {
+    return NULL;
+  }
+
+  /* Build absolute URI */
+
+  /* Get base URI at the MPD level */
+  base_uri =
+      gst_uri_from_string (client->mpd_base_uri ? client->
+      mpd_base_uri : client->mpd_uri);
+
+  /* combine a BaseURL at the MPD level with the current base url */
+  base_uri =
+      gst_mpd_helper_combine_urls (base_uri, client->mpd_root_node->BaseURLs,
+      &query, 0);
+
+  /* combine a BaseURL at the Period level with the current base url */
+  base_uri =
+      gst_mpd_helper_combine_urls (base_uri, period->BaseURLs, &query, 0);
+
+  uri = gst_uri_from_string_with_base (base_uri, adapt_set->xlink_href);
+  if (query)
+    gst_uri_set_query_string (uri, query);
+  g_free (query);
+  uri_string = gst_uri_to_string (uri);
+  gst_uri_unref (base_uri);
+  gst_uri_unref (uri);
+
+  download =
+      downloadhelper_fetch_uri (client->download_helper,
+      uri_string, client->mpd_uri,
+      DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH, &err);
+  g_free (uri_string);
+
+  if (!download) {
+    GST_ERROR ("Failed to download external AdaptationSet node at '%s': %s",
+        adapt_set->xlink_href, err->message);
+    g_clear_error (&err);
+    return NULL;
+  }
+
+  adapt_set_buffer = download_request_take_buffer (download);
+  download_request_unref (download);
+
+  if (adapt_set_buffer) {
+    GstMapInfo map;
+    gst_buffer_map (adapt_set_buffer, &map, GST_MAP_READ);
+
+    new_adapt_sets =
+        gst_mpdparser_get_external_adaptation_sets ((const gchar *) map.data,
+        map.size, period);
+
+    gst_buffer_unmap (adapt_set_buffer, &map);
+    gst_buffer_unref (adapt_set_buffer);
+  }
+
+  return new_adapt_sets;
+}
+
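+/* Return the AdaptationSets of @period, first resolving any remaining
+ * external (xlink) AdaptationSets so that callers see the complete list. */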
+static GList *
+gst_mpd_client2_get_adaptation_sets_for_period (GstMPDClient2 * client,
+    GstStreamPeriod * period)
+{
+  GList *list;
+
+  g_return_val_if_fail (period != NULL, NULL);
+
+  /* Resolve all external adaptation sets of this period. Every user of
+   * the adaptation sets would need to know the content of all adaptation sets
+   * to decide which one to use, so we have to resolve them all here
+   */
+  for (list = period->period->AdaptationSets; list;
+      /* advanced explicitly below */ ) {
+    GstMPDAdaptationSetNode *adapt_set = (GstMPDAdaptationSetNode *) list->data;
+    GList *new_adapt_sets = NULL, *prev, *next;
+
+    if (!adapt_set->xlink_href) {
+      list = list->next;
+      continue;
+    }
+
+    new_adapt_sets =
+        gst_mpd_client2_fetch_external_adaptation_set (client, period->period,
+        adapt_set);
+
+    prev = list->prev;
+    period->period->AdaptationSets =
+        g_list_delete_link (period->period->AdaptationSets, list);
+    gst_mpd_adaptation_set_node_free (adapt_set);
+    adapt_set = NULL;
+
+    /* Get new next node, we will insert before this */
+    if (prev)
+      next = prev->next;
+    else
+      next = period->period->AdaptationSets;
+
+    while (new_adapt_sets) {
+      period->period->AdaptationSets =
+          g_list_insert_before (period->period->AdaptationSets, next,
+          new_adapt_sets->data);
+      new_adapt_sets = g_list_delete_link (new_adapt_sets, new_adapt_sets);
+    }
+
+    /* Update our iterator to the first new adaptation set if any, or the next */
+    if (prev)
+      list = prev->next;
+    else
+      list = period->period->AdaptationSets;
+  }
+
+  return period->period->AdaptationSets;
+}
+
+GList *
+gst_mpd_client2_get_adaptation_sets (GstMPDClient2 * client)
+{
+  GstStreamPeriod *stream_period;
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  if (stream_period == NULL || stream_period->period == NULL) {
+    GST_DEBUG ("No more Period nodes in the MPD file, terminating...");
+    return NULL;
+  }
+
+  return gst_mpd_client2_get_adaptation_sets_for_period (client, stream_period);
+}
+
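+/* Create a GstActiveStream for @adapt_set, select an initial representation
+ * (the lowest-bandwidth one) and build its segment list. */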
+gboolean
+gst_mpd_client2_setup_streaming (GstMPDClient2 * client,
+    GstMPDAdaptationSetNode * adapt_set)
+{
+  GstMPDRepresentationNode *representation;
+  GList *rep_list = NULL;
+  GstActiveStream *stream;
+
+  rep_list = adapt_set->Representations;
+  if (!rep_list) {
+    GST_WARNING ("Can not retrieve any representation, aborting...");
+    return FALSE;
+  }
+
+  stream = g_slice_new0 (GstActiveStream);
+  gst_mpdparser_init_active_stream_segments (stream);
+
+  stream->baseURL_idx = 0;
+  stream->cur_adapt_set = adapt_set;
+
+  GST_DEBUG ("0. Current stream %p", stream);
+
+#if 0
+  /* fast start */
+  representation =
+      gst_mpdparser_get_representation_with_max_bandwidth (rep_list,
+      stream->max_bandwidth);
+
+  if (!representation) {
+    GST_WARNING
+        ("Can not retrieve a representation with the requested bandwidth");
+    representation = gst_mpd_client2_get_lowest_representation (rep_list);
+  }
+#else
+  /* slow start */
+  representation = gst_mpd_client2_get_lowest_representation (rep_list);
+#endif
+
+  if (!representation) {
+    GST_WARNING ("No valid representation in the MPD file, aborting...");
+    gst_mpdparser_free_active_stream (stream);
+    return FALSE;
+  }
+  stream->mimeType =
+      gst_mpdparser_representation_get_mimetype (adapt_set, representation);
+  if (stream->mimeType == GST_STREAM_UNKNOWN) {
+    GST_WARNING ("Unknown mime type in the representation, aborting...");
+    gst_mpdparser_free_active_stream (stream);
+    return FALSE;
+  }
+
+  client->active_streams = g_list_append (client->active_streams, stream);
+  if (!gst_mpd_client2_setup_representation (client, stream, representation)) {
+    GST_WARNING ("Failed to setup the representation, aborting...");
+    return FALSE;
+  }
+
+  GST_INFO ("Successfully setup the download pipeline for mimeType %d",
+      stream->mimeType);
+
+  return TRUE;
+}
+
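+/* Position @stream on the segment (and repeat index) containing @ts,
+ * honouring the seek direction and snap flags. Returns FALSE if the target
+ * lies beyond the last segment. */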
+gboolean
+gst_mpd_client2_stream_seek (GstMPDClient2 * client, GstActiveStream * stream,
+    gboolean forward, GstSeekFlags flags, GstClockTime ts,
+    GstClockTime * final_ts)
+{
+  gint index = 0;
+  gint repeat_index = 0;
+  GstMediaSegment *selectedChunk = NULL;
+
+  g_return_val_if_fail (stream != NULL, 0);
+
+  if (stream->segments) {
+    for (index = 0; index < stream->segments->len; index++) {
+      gboolean in_segment = FALSE;
+      GstMediaSegment *segment = g_ptr_array_index (stream->segments, index);
+      GstClockTime end_time;
+
+      GST_DEBUG ("Looking at fragment sequence chunk %d / %d", index,
+          stream->segments->len);
+
+      end_time =
+          gst_mpd_client2_get_segment_end_time (client, stream->segments,
+          segment, index);
+
+      /* avoid downloading another fragment just for 1ns in reverse mode */
+      if (forward)
+        in_segment = ts < end_time;
+      else
+        in_segment = ts <= end_time;
+
+      if (in_segment) {
+        GstClockTime chunk_time;
+
+        selectedChunk = segment;
+        repeat_index = (ts - segment->start) / segment->duration;
+
+        chunk_time = segment->start + segment->duration * repeat_index;
+
+        /* At the end of a segment in reverse mode, start from the previous fragment */
+        if (!forward && repeat_index > 0
+            && ((ts - segment->start) % segment->duration == 0))
+          repeat_index--;
+
+        if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
+          if (repeat_index + 1 < segment->repeat) {
+            if (ts - chunk_time > chunk_time + segment->duration - ts)
+              repeat_index++;
+          } else if (index + 1 < stream->segments->len) {
+            GstMediaSegment *next_segment =
+                g_ptr_array_index (stream->segments, index + 1);
+
+            if (ts - chunk_time > next_segment->start - ts) {
+              repeat_index = 0;
+              selectedChunk = next_segment;
+              index++;
+            }
+          }
+        } else if (((forward && flags & GST_SEEK_FLAG_SNAP_AFTER) ||
+                (!forward && flags & GST_SEEK_FLAG_SNAP_BEFORE)) &&
+            ts != chunk_time) {
+
+          if (repeat_index + 1 < segment->repeat) {
+            repeat_index++;
+          } else {
+            repeat_index = 0;
+            if (index + 1 >= stream->segments->len) {
+              selectedChunk = NULL;
+            } else {
+              selectedChunk = g_ptr_array_index (stream->segments, ++index);
+            }
+          }
+        }
+        break;
+      }
+    }
+
+    if (selectedChunk == NULL) {
+      stream->segment_index = stream->segments->len;
+      stream->segment_repeat_index = 0;
+      GST_DEBUG ("Seek to after last segment");
+      return FALSE;
+    }
+
+    if (final_ts)
+      *final_ts = selectedChunk->start + selectedChunk->duration * repeat_index;
+  } else {
+    GstClockTime duration =
+        gst_mpd_client2_get_segment_duration (client, stream, NULL);
+    GstStreamPeriod *stream_period = gst_mpd_client2_get_stream_period (client);
+    guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+    GstClockTime index_time;
+
+    g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+        (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+    if (!GST_CLOCK_TIME_IS_VALID (duration)) {
+      return FALSE;
+    }
+
+    if (ts > stream_period->start)
+      ts -= stream_period->start;
+    else
+      ts = 0;
+
+    index = ts / duration;
+
+    /* At the end of a segment in reverse mode, start from the previous fragment */
+    if (!forward && index > 0 && ts % duration == 0)
+      index--;
+
+    index_time = index * duration;
+
+    if ((flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST) {
+      if (ts - index_time > index_time + duration - ts)
+        index++;
+    } else if (((forward && flags & GST_SEEK_FLAG_SNAP_AFTER) ||
+            (!forward && flags & GST_SEEK_FLAG_SNAP_BEFORE))
+        && ts != index_time) {
+      index++;
+    }
+
+    if (segments_count > 0 && index >= segments_count) {
+      stream->segment_index = segments_count;
+      stream->segment_repeat_index = 0;
+      GST_DEBUG ("Seek to after last segment");
+      return FALSE;
+    }
+    if (final_ts)
+      *final_ts = index * duration;
+  }
+
+  stream->segment_repeat_index = repeat_index;
+  stream->segment_index = index;
+
+  return TRUE;
+}
+
+GstClockTimeDiff
+gst_mpd_client2_calculate_time_difference (const GstDateTime * t1,
+    const GstDateTime * t2)
+{
+  GDateTime *gdt1, *gdt2;
+  GTimeSpan diff;
+
+  g_assert (t1 != NULL && t2 != NULL);
+  gdt1 = gst_date_time_to_g_date_time ((GstDateTime *) t1);
+  gdt2 = gst_date_time_to_g_date_time ((GstDateTime *) t2);
+  diff = g_date_time_difference (gdt2, gdt1);
+  g_date_time_unref (gdt1);
+  g_date_time_unref (gdt2);
+  return diff * GST_USECOND;
+}
+
+GstDateTime *
+gst_mpd_client2_add_time_difference (GstDateTime * t1, GstClockTimeDiff diff)
+{
+  GDateTime *gdt;
+  GDateTime *gdt2;
+  GstDateTime *rv;
+
+  g_assert (t1 != NULL);
+  gdt = gst_date_time_to_g_date_time (t1);
+  g_assert (gdt != NULL);
+  gdt2 = g_date_time_add (gdt, diff / GST_USECOND);
+  g_assert (gdt2 != NULL);
+  g_date_time_unref (gdt);
+  rv = gst_date_time_new_from_g_date_time (gdt2);
+
+  /* Don't g_date_time_unref(gdt2) because gst_date_time_new_from_g_date_time takes
+   * ownership of the GDateTime pointer.
+   */
+
+  return rv;
+}
+
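+/* Return in @ts the end timestamp of the stream's last fragment, taken from
+ * the last entry of the segment list or from the period end. */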
+gboolean
+gst_mpd_client2_get_last_fragment_timestamp_end (GstMPDClient2 * client,
+    guint stream_idx, GstClockTime * ts)
+{
+  GstActiveStream *stream;
+  gint segment_idx;
+  GstMediaSegment *currentChunk;
+  GstStreamPeriod *stream_period;
+
+  GST_DEBUG ("Stream index: %i", stream_idx);
+  stream = g_list_nth_data (client->active_streams, stream_idx);
+  g_return_val_if_fail (stream != NULL, 0);
+
+  if (!stream->segments) {
+    stream_period = gst_mpd_client2_get_stream_period (client);
+    *ts = stream_period->start + stream_period->duration;
+  } else {
+    segment_idx = gst_mpd_client2_get_segments_counts (client, stream) - 1;
+    if (segment_idx >= stream->segments->len) {
+      GST_WARNING ("Segment index %d is outside of segment list of length %d",
+          segment_idx, stream->segments->len);
+      return FALSE;
+    }
+    currentChunk = g_ptr_array_index (stream->segments, segment_idx);
+
+    if (currentChunk->repeat >= 0) {
+      *ts =
+          currentChunk->start + (currentChunk->duration * (1 +
+              currentChunk->repeat));
+    } else {
+      /* 5.3.9.6.1: negative repeat means repeat till the end of the
+       * period, or the next update of the MPD (which I think is
+       * implicit, as this will all get deleted/recreated), or the
+       * start of the next segment, if any. */
+      stream_period = gst_mpd_client2_get_stream_period (client);
+      *ts = stream_period->start + stream_period->duration;
+    }
+  }
+
+  return TRUE;
+}
+
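+/* Return in @ts the start timestamp of the fragment the stream is currently
+ * positioned on. */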
+gboolean
+gst_mpd_client2_get_next_fragment_timestamp (GstMPDClient2 * client,
+    guint stream_idx, GstClockTime * ts)
+{
+  GstActiveStream *stream;
+  GstMediaSegment *currentChunk;
+
+  GST_DEBUG ("Stream index: %i", stream_idx);
+  stream = g_list_nth_data (client->active_streams, stream_idx);
+  g_return_val_if_fail (stream != NULL, 0);
+
+  if (stream->segments) {
+    GST_DEBUG ("Looking for fragment sequence chunk %d / %d",
+        stream->segment_index, stream->segments->len);
+    if (stream->segment_index >= stream->segments->len)
+      return FALSE;
+    currentChunk = g_ptr_array_index (stream->segments, stream->segment_index);
+
+    *ts =
+        currentChunk->start +
+        (currentChunk->duration * stream->segment_repeat_index);
+  } else {
+    GstClockTime duration =
+        gst_mpd_client2_get_segment_duration (client, stream, NULL);
+    guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+
+    g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+        (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+    if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+            && stream->segment_index >= segments_count)) {
+      return FALSE;
+    }
+    *ts = stream->segment_index * duration;
+  }
+
+  return TRUE;
+}
+
+GstClockTime
+gst_mpd_client2_get_stream_presentation_offset (GstMPDClient2 * client,
+    guint stream_idx)
+{
+  GstActiveStream *stream = NULL;
+
+  g_return_val_if_fail (client != NULL, 0);
+  g_return_val_if_fail (client->active_streams != NULL, 0);
+  stream = g_list_nth_data (client->active_streams, stream_idx);
+  g_return_val_if_fail (stream != NULL, 0);
+
+  return stream->presentationTimeOffset;
+}
+
+GstClockTime
+gst_mpd_client2_get_period_start_time (GstMPDClient2 * client)
+{
+  GstStreamPeriod *stream_period = NULL;
+
+  g_return_val_if_fail (client != NULL, 0);
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, 0);
+
+  return stream_period->start;
+}
+
+/**
+ * gst_mpd_client2_get_utc_timing_sources:
+ * @client: #GstMPDClient2 to check for UTCTiming elements
+ * @methods: A bit mask of #GstMPDUTCTimingType that specifies the methods
+ *     to search for.
+ * @selected_method: (nullable): The selected method
+ * Returns: (transfer none): A NULL terminated array of URLs of servers
+ *     that use the selected method to provide a realtime clock.
+ *
+ * Searches the UTCTiming elements found in the manifest for an element
+ * that uses one of the UTC timing methods specified in @methods.
+ * If multiple UTCTiming elements are present that support one of the
+ * methods specified in @methods, the first matching one is returned.
+ *
+ * Since: 1.6
+ */
+gchar **
+gst_mpd_client2_get_utc_timing_sources (GstMPDClient2 * client,
+    guint methods, GstMPDUTCTimingType * selected_method)
+{
+  GList *list;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+  for (list = g_list_first (client->mpd_root_node->UTCTimings); list;
+      list = g_list_next (list)) {
+    const GstMPDUTCTimingNode *node = (const GstMPDUTCTimingNode *) list->data;
+    if (node->method & methods) {
+      if (selected_method) {
+        *selected_method = node->method;
+      }
+      return node->urls;
+    }
+  }
+  return NULL;
+}
+
+
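+/* Fill @fragment with the URI, byte ranges, timestamp and duration of the
+ * fragment the stream is currently positioned on, resolving the media and
+ * index URLs against the stream's base URL and query string. */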
+gboolean
+gst_mpd_client2_get_next_fragment (GstMPDClient2 * client,
+    guint indexStream, GstMediaFragmentInfo * fragment)
+{
+  GstActiveStream *stream = NULL;
+  GstMediaSegment *currentChunk;
+  gchar *mediaURL = NULL;
+  gchar *indexURL = NULL;
+  GstUri *base_url, *frag_url;
+
+  /* select stream */
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->active_streams != NULL, FALSE);
+  stream = g_list_nth_data (client->active_streams, indexStream);
+  g_return_val_if_fail (stream != NULL, FALSE);
+  g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+
+  if (stream->segments) {
+    GST_DEBUG ("Looking for fragment sequence chunk %d / %d",
+        stream->segment_index, stream->segments->len);
+    if (stream->segment_index >= stream->segments->len)
+      return FALSE;
+  } else {
+    GstClockTime duration = gst_mpd_client2_get_segment_duration (client,
+        stream, NULL);
+    guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+
+    g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+        (stream->cur_seg_template)->SegmentTimeline == NULL, FALSE);
+    if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+            && stream->segment_index >= segments_count)) {
+      return FALSE;
+    }
+    fragment->duration = duration;
+  }
+
+  /* FIXME rework discont checking */
+  /* fragment->discontinuity = segment_idx != currentChunk.number; */
+  fragment->range_start = 0;
+  fragment->range_end = -1;
+  fragment->index_uri = NULL;
+  fragment->index_range_start = 0;
+  fragment->index_range_end = -1;
+
+  if (stream->segments) {
+    currentChunk = g_ptr_array_index (stream->segments, stream->segment_index);
+
+    GST_DEBUG ("currentChunk->SegmentURL = %p", currentChunk->SegmentURL);
+    if (currentChunk->SegmentURL != NULL) {
+      mediaURL =
+          g_strdup (gst_mpdparser_get_mediaURL (stream,
+              currentChunk->SegmentURL));
+      indexURL = g_strdup (currentChunk->SegmentURL->index);
+    } else if (stream->cur_seg_template != NULL) {
+      mediaURL =
+          gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+          media, stream->cur_representation->id,
+          currentChunk->number + stream->segment_repeat_index,
+          stream->cur_representation->bandwidth,
+          currentChunk->scale_start +
+          stream->segment_repeat_index * currentChunk->scale_duration);
+      if (stream->cur_seg_template->index) {
+        indexURL =
+            gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+            index, stream->cur_representation->id,
+            currentChunk->number + stream->segment_repeat_index,
+            stream->cur_representation->bandwidth,
+            currentChunk->scale_start +
+            stream->segment_repeat_index * currentChunk->scale_duration);
+      }
+    }
+    GST_DEBUG ("mediaURL = %s", mediaURL);
+    GST_DEBUG ("indexURL = %s", indexURL);
+
+    fragment->timestamp =
+        currentChunk->start +
+        stream->segment_repeat_index * currentChunk->duration;
+    fragment->duration = currentChunk->duration;
+    if (currentChunk->SegmentURL) {
+      if (currentChunk->SegmentURL->mediaRange) {
+        fragment->range_start =
+            currentChunk->SegmentURL->mediaRange->first_byte_pos;
+        fragment->range_end =
+            currentChunk->SegmentURL->mediaRange->last_byte_pos;
+      }
+      if (currentChunk->SegmentURL->indexRange) {
+        fragment->index_range_start =
+            currentChunk->SegmentURL->indexRange->first_byte_pos;
+        fragment->index_range_end =
+            currentChunk->SegmentURL->indexRange->last_byte_pos;
+      }
+    }
+  } else {
+    if (stream->cur_seg_template != NULL) {
+      mediaURL =
+          gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+          media, stream->cur_representation->id,
+          stream->segment_index +
+          GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+              cur_seg_template)->startNumber,
+          stream->cur_representation->bandwidth,
+          stream->segment_index * fragment->duration);
+      if (stream->cur_seg_template->index) {
+        indexURL =
+            gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+            index, stream->cur_representation->id,
+            stream->segment_index +
+            GST_MPD_MULT_SEGMENT_BASE_NODE (stream->
+                cur_seg_template)->startNumber,
+            stream->cur_representation->bandwidth,
+            stream->segment_index * fragment->duration);
+      }
+    } else {
+      return FALSE;
+    }
+
+    GST_DEBUG ("mediaURL = %s", mediaURL);
+    GST_DEBUG ("indexURL = %s", indexURL);
+
+    fragment->timestamp = stream->segment_index * fragment->duration;
+  }
+
+  base_url = gst_uri_from_string (stream->baseURL);
+  frag_url = gst_uri_from_string_with_base (base_url, mediaURL);
+  g_free (mediaURL);
+  if (stream->queryURL) {
+    frag_url = gst_uri_make_writable (frag_url);
+    gst_uri_set_query_string (frag_url, stream->queryURL);
+  }
+  fragment->uri = gst_uri_to_string (frag_url);
+  gst_uri_unref (frag_url);
+
+  if (indexURL != NULL) {
+    frag_url = gst_uri_make_writable (gst_uri_from_string_with_base (base_url,
+            indexURL));
+    gst_uri_set_query_string (frag_url, stream->queryURL);
+    fragment->index_uri = gst_uri_to_string (frag_url);
+    gst_uri_unref (frag_url);
+    g_free (indexURL);
+  } else if (indexURL == NULL && (fragment->index_range_start
+          || fragment->index_range_end != -1)) {
+    /* index has no specific URL but has a range, we should only use this if
+     * the media also has a range, otherwise we are serving some data twice
+     * (in the media fragment and again in the index) */
+    if (!(fragment->range_start || fragment->range_end != -1)) {
+      GST_WARNING ("Ignoring index ranges because there isn't a media range "
+          "and URIs would be the same");
+      /* removing index information */
+      fragment->index_range_start = 0;
+      fragment->index_range_end = -1;
+    }
+  }
+
+  gst_uri_unref (base_url);
+
+  GST_DEBUG ("Loading chunk with URL %s", fragment->uri);
+
+  return TRUE;
+}
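+
+/* Illustrative example (comment only): given a SegmentTemplate with
+ * media="seg_$RepresentationID$_$Number$.m4s" (a made-up template string),
+ * a representation id of "video-1" and segment number 42,
+ * gst_mpdparser_build_URL_from_template() would be expected to produce
+ * "seg_video-1_42.m4s", which is then resolved against the stream's baseURL
+ * as done above. */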
+
+gboolean
+gst_mpd_client2_has_next_segment (GstMPDClient2 * client,
+    GstActiveStream * stream, gboolean forward)
+{
+  if (forward) {
+    guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+
+    if (segments_count > 0 && stream->segments
+        && stream->segment_index + 1 == segments_count) {
+      GstMediaSegment *segment;
+
+      segment = g_ptr_array_index (stream->segments, stream->segment_index);
+      if (segment->repeat >= 0
+          && stream->segment_repeat_index >= segment->repeat)
+        return FALSE;
+    } else if (segments_count > 0
+        && stream->segment_index + 1 >= segments_count) {
+      return FALSE;
+    }
+  } else {
+    if (stream->segment_index < 0)
+      return FALSE;
+  }
+
+  return TRUE;
+}
+
+GstFlowReturn
+gst_mpd_client2_advance_segment (GstMPDClient2 * client,
+    GstActiveStream * stream, gboolean forward)
+{
+  GstMediaSegment *segment;
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+
+  GST_DEBUG ("Advancing segment. Current: %d / %d r:%d", stream->segment_index,
+      segments_count, stream->segment_repeat_index);
+
+  /* handle special cases first */
+  if (forward) {
+    if (segments_count > 0 && stream->segment_index >= segments_count) {
+      ret = GST_FLOW_EOS;
+      goto done;
+    }
+
+    if (stream->segments == NULL) {
+      if (stream->segment_index < 0) {
+        stream->segment_index = 0;
+      } else {
+        stream->segment_index++;
+        if (segments_count > 0 && stream->segment_index >= segments_count) {
+          ret = GST_FLOW_EOS;
+        }
+      }
+      goto done;
+    }
+
+    /* special case for when playback direction is reverted right at
+     * the end of the segment list */
+    if (stream->segment_index < 0) {
+      stream->segment_index = 0;
+      goto done;
+    }
+  } else {
+    if (stream->segments == NULL)
+      stream->segment_index--;
+    if (stream->segment_index < 0) {
+      stream->segment_index = -1;
+      ret = GST_FLOW_EOS;
+      goto done;
+    }
+    if (stream->segments == NULL)
+      goto done;
+
+    /* special case for when playback direction is reverted right at
+     * the end of the segment list */
+    if (stream->segment_index >= segments_count) {
+      stream->segment_index = segments_count - 1;
+      segment = g_ptr_array_index (stream->segments, stream->segment_index);
+      if (segment->repeat >= 0) {
+        stream->segment_repeat_index = segment->repeat;
+      } else {
+        GstClockTime start = segment->start;
+        GstClockTime end =
+            gst_mpd_client2_get_segment_end_time (client, stream->segments,
+            segment,
+            stream->segment_index);
+        stream->segment_repeat_index =
+            (guint) (end - start) / segment->duration;
+      }
+      goto done;
+    }
+  }
+
+  /* for the normal cases we can get the segment safely here */
+  segment = g_ptr_array_index (stream->segments, stream->segment_index);
+  if (forward) {
+    if (segment->repeat >= 0 && stream->segment_repeat_index >= segment->repeat) {
+      stream->segment_repeat_index = 0;
+      stream->segment_index++;
+      if (segments_count > 0 && stream->segment_index >= segments_count) {
+        ret = GST_FLOW_EOS;
+        goto done;
+      }
+    } else {
+      stream->segment_repeat_index++;
+    }
+  } else {
+    if (stream->segment_repeat_index == 0) {
+      stream->segment_index--;
+      if (stream->segment_index < 0) {
+        ret = GST_FLOW_EOS;
+        goto done;
+      }
+
+      segment = g_ptr_array_index (stream->segments, stream->segment_index);
+      /* negative repeats only seem to make sense at the end of a list,
+       * so this one will probably not be negative. Needs some sanity
+       * checking when loading the XML data. */
+      if (segment->repeat >= 0) {
+        stream->segment_repeat_index = segment->repeat;
+      } else {
+        GstClockTime start = segment->start;
+        GstClockTime end =
+            gst_mpd_client2_get_segment_end_time (client, stream->segments,
+            segment,
+            stream->segment_index);
+        stream->segment_repeat_index =
+            (guint) (end - start) / segment->duration;
+      }
+    } else {
+      stream->segment_repeat_index--;
+    }
+  }
+
+done:
+  GST_DEBUG ("Advanced to segment: %d / %d r:%d (ret: %s)",
+      stream->segment_index, segments_count,
+      stream->segment_repeat_index, gst_flow_get_name (ret));
+  return ret;
+}
+
+gboolean
+gst_mpd_client2_get_next_header (GstMPDClient2 * client, gchar ** uri,
+    guint stream_idx, gint64 * range_start, gint64 * range_end)
+{
+  GstActiveStream *stream;
+  GstStreamPeriod *stream_period;
+
+  stream = gst_mpd_client2_get_active_stream_by_index (client, stream_idx);
+  g_return_val_if_fail (stream != NULL, FALSE);
+  g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, FALSE);
+  g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+  *range_start = 0;
+  *range_end = -1;
+
+  GST_DEBUG ("Looking for current representation header");
+  *uri = NULL;
+  if (stream->cur_segment_base) {
+    if (stream->cur_segment_base->Initialization) {
+      *uri =
+          g_strdup (gst_mpdparser_get_initializationURL (stream,
+              stream->cur_segment_base->Initialization));
+      if (stream->cur_segment_base->Initialization->range) {
+        *range_start =
+            stream->cur_segment_base->Initialization->range->first_byte_pos;
+        *range_end =
+            stream->cur_segment_base->Initialization->range->last_byte_pos;
+      }
+    } else if (stream->cur_segment_base->indexRange) {
+      *uri =
+          g_strdup (gst_mpdparser_get_initializationURL (stream,
+              stream->cur_segment_base->Initialization));
+      *range_start = 0;
+      *range_end = stream->cur_segment_base->indexRange->first_byte_pos - 1;
+    }
+  } else if (stream->cur_seg_template
+      && stream->cur_seg_template->initialization) {
+    *uri =
+        gst_mpdparser_build_URL_from_template (stream->cur_seg_template->
+        initialization, stream->cur_representation->id, 0,
+        stream->cur_representation->bandwidth, 0);
+  }
+
+  return *uri == NULL ? FALSE : TRUE;
+}
+
+gboolean
+gst_mpd_client2_get_next_header_index (GstMPDClient2 * client, gchar ** uri,
+    guint stream_idx, gint64 * range_start, gint64 * range_end)
+{
+  GstActiveStream *stream;
+  GstStreamPeriod *stream_period;
+
+  stream = gst_mpd_client2_get_active_stream_by_index (client, stream_idx);
+  g_return_val_if_fail (stream != NULL, FALSE);
+  g_return_val_if_fail (stream->cur_representation != NULL, FALSE);
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, FALSE);
+  g_return_val_if_fail (stream_period->period != NULL, FALSE);
+
+  *range_start = 0;
+  *range_end = -1;
+
+  GST_DEBUG ("Looking for current representation index");
+  *uri = NULL;
+  if (stream->cur_segment_base && stream->cur_segment_base->indexRange) {
+    *uri =
+        g_strdup (gst_mpdparser_get_initializationURL (stream,
+            stream->cur_segment_base->RepresentationIndex));
+    *range_start = stream->cur_segment_base->indexRange->first_byte_pos;
+    *range_end = stream->cur_segment_base->indexRange->last_byte_pos;
+  } else if (stream->cur_seg_template && stream->cur_seg_template->index) {
+    *uri =
+        gst_mpdparser_build_URL_from_template (stream->cur_seg_template->index,
+        stream->cur_representation->id, 0,
+        stream->cur_representation->bandwidth, 0);
+  }
+
+  return *uri == NULL ? FALSE : TRUE;
+}
+
+GstClockTime
+gst_mpd_client2_get_next_fragment_duration (GstMPDClient2 * client,
+    GstActiveStream * stream)
+{
+  GstMediaSegment *media_segment = NULL;
+  gint seg_idx;
+
+  g_return_val_if_fail (stream != NULL, 0);
+
+  seg_idx = stream->segment_index;
+
+  if (stream->segments) {
+    if (seg_idx < stream->segments->len && seg_idx >= 0)
+      media_segment = g_ptr_array_index (stream->segments, seg_idx);
+
+    return media_segment == NULL ? 0 : media_segment->duration;
+  } else {
+    GstClockTime duration =
+        gst_mpd_client2_get_segment_duration (client, stream, NULL);
+    guint segments_count = gst_mpd_client2_get_segments_counts (client, stream);
+
+    g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+        (stream->cur_seg_template)->SegmentTimeline == NULL, 0);
+
+    if (!GST_CLOCK_TIME_IS_VALID (duration) || (segments_count > 0
+            && seg_idx >= segments_count)) {
+      return 0;
+    }
+    return duration;
+  }
+}
+
+GstClockTime
+gst_mpd_client2_get_media_presentation_duration (GstMPDClient2 * client)
+{
+  GstClockTime duration;
+
+  g_return_val_if_fail (client != NULL, GST_CLOCK_TIME_NONE);
+
+  if (client->mpd_root_node->mediaPresentationDuration != -1) {
+    duration = client->mpd_root_node->mediaPresentationDuration * GST_MSECOND;
+  } else {
+    /* We can only get the duration for on-demand streams */
+    duration = GST_CLOCK_TIME_NONE;
+  }
+
+  return duration;
+}
+
+gboolean
+gst_mpd_client2_set_period_id (GstMPDClient2 * client, const gchar * period_id)
+{
+  GstStreamPeriod *next_stream_period;
+  gboolean ret = FALSE;
+  GList *iter;
+  guint period_idx;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->periods != NULL, FALSE);
+  g_return_val_if_fail (period_id != NULL, FALSE);
+
+  if (!gst_mpd_client2_setup_media_presentation (client, GST_CLOCK_TIME_NONE,
+          -1, period_id))
+    return FALSE;
+
+  for (period_idx = 0, iter = client->periods; iter;
+      period_idx++, iter = g_list_next (iter)) {
+    next_stream_period = iter->data;
+
+    if (next_stream_period->period->id
+        && strcmp (next_stream_period->period->id, period_id) == 0) {
+      ret = TRUE;
+      client->period_idx = period_idx;
+      break;
+    }
+  }
+
+  return ret;
+}
+
+gboolean
+gst_mpd_client2_set_period_index (GstMPDClient2 * client, guint period_idx)
+{
+  GstStreamPeriod *next_stream_period;
+  gboolean ret = FALSE;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->periods != NULL, FALSE);
+
+  if (!gst_mpd_client2_setup_media_presentation (client, -1, period_idx, NULL))
+    return FALSE;
+
+  next_stream_period = g_list_nth_data (client->periods, period_idx);
+  if (next_stream_period != NULL) {
+    client->period_idx = period_idx;
+    ret = TRUE;
+  }
+
+  return ret;
+}
+
+guint
+gst_mpd_client2_get_period_index (GstMPDClient2 * client)
+{
+  guint period_idx;
+
+  g_return_val_if_fail (client != NULL, 0);
+  period_idx = client->period_idx;
+
+  return period_idx;
+}
+
+const gchar *
+gst_mpd_client2_get_period_id (GstMPDClient2 * client)
+{
+  GstStreamPeriod *period;
+  gchar *period_id = NULL;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  period = g_list_nth_data (client->periods, client->period_idx);
+  if (period && period->period)
+    period_id = period->period->id;
+
+  return period_id;
+}
+
+gboolean
+gst_mpd_client2_has_next_period (GstMPDClient2 * client)
+{
+  GList *next_stream_period;
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->periods != NULL, FALSE);
+
+  if (!gst_mpd_client2_setup_media_presentation (client, GST_CLOCK_TIME_NONE,
+          client->period_idx + 1, NULL))
+    return FALSE;
+
+  next_stream_period =
+      g_list_nth_data (client->periods, client->period_idx + 1);
+  return next_stream_period != NULL;
+}
+
+gboolean
+gst_mpd_client2_has_previous_period (GstMPDClient2 * client)
+{
+  GList *next_stream_period;
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->periods != NULL, FALSE);
+
+  if (!gst_mpd_client2_setup_media_presentation (client, GST_CLOCK_TIME_NONE,
+          client->period_idx - 1, NULL))
+    return FALSE;
+
+  next_stream_period =
+      g_list_nth_data (client->periods, client->period_idx - 1);
+
+  return next_stream_period != NULL;
+}
+
+gint
+gst_mpd_client2_get_rep_idx_with_min_bandwidth (GList * Representations)
+{
+  GList *list = NULL, *lowest = NULL;
+  GstMPDRepresentationNode *rep = NULL;
+  gint lowest_bandwidth = -1;
+
+  if (Representations == NULL)
+    return -1;
+
+  for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+    rep = (GstMPDRepresentationNode *) list->data;
+    if (rep && (!lowest || rep->bandwidth < lowest_bandwidth)) {
+      lowest = list;
+      lowest_bandwidth = rep->bandwidth;
+    }
+  }
+
+  return lowest ? g_list_position (Representations, lowest) : -1;
+}
+
+gint
+gst_mpd_client2_get_rep_idx_with_max_bandwidth (GList * Representations,
+    gint64 max_bandwidth, gint max_video_width, gint max_video_height, gint
+    max_video_framerate_n, gint max_video_framerate_d)
+{
+  GList *list = NULL, *best = NULL;
+  GstMPDRepresentationNode *representation;
+  gint best_bandwidth = 0;
+
+  GST_DEBUG ("max_bandwidth = %" G_GINT64_FORMAT, max_bandwidth);
+
+  if (Representations == NULL)
+    return -1;
+
+  if (max_bandwidth <= 0)       /* 0 => get lowest representation available */
+    return gst_mpd_client2_get_rep_idx_with_min_bandwidth (Representations);
+
+  for (list = g_list_first (Representations); list; list = g_list_next (list)) {
+    GstXMLFrameRate *framerate = NULL;
+
+    representation = (GstMPDRepresentationNode *) list->data;
+
+    /* FIXME: Really? */
+    if (!representation)
+      continue;
+
+    framerate = GST_MPD_REPRESENTATION_BASE_NODE (representation)->frameRate;
+    if (!framerate)
+      framerate =
+          GST_MPD_REPRESENTATION_BASE_NODE (representation)->maxFrameRate;
+
+    if (framerate && max_video_framerate_n > 0) {
+      if (gst_util_fraction_compare (framerate->num, framerate->den,
+              max_video_framerate_n, max_video_framerate_d) > 0)
+        continue;
+    }
+
+    if (max_video_width > 0
+        && GST_MPD_REPRESENTATION_BASE_NODE (representation)->width >
+        max_video_width)
+      continue;
+    if (max_video_height > 0
+        && GST_MPD_REPRESENTATION_BASE_NODE (representation)->height >
+        max_video_height)
+      continue;
+
+    if (representation->bandwidth <= max_bandwidth &&
+        representation->bandwidth > best_bandwidth) {
+      best = list;
+      best_bandwidth = representation->bandwidth;
+    }
+  }
+
+  return best ? g_list_position (Representations, best) : -1;
+}
+
+void
+gst_mpd_client2_seek_to_first_segment (GstMPDClient2 * client)
+{
+  GList *list;
+
+  g_return_if_fail (client != NULL);
+  g_return_if_fail (client->active_streams != NULL);
+
+  for (list = g_list_first (client->active_streams); list;
+      list = g_list_next (list)) {
+    GstActiveStream *stream = (GstActiveStream *) list->data;
+    if (stream) {
+      stream->segment_index = 0;
+      stream->segment_repeat_index = 0;
+    }
+  }
+}
+
+static guint
+gst_mpd_client2_get_segments_counts (GstMPDClient2 * client,
+    GstActiveStream * stream)
+{
+  GstStreamPeriod *stream_period;
+
+  g_return_val_if_fail (stream != NULL, 0);
+
+  if (stream->segments)
+    return stream->segments->len;
+  g_return_val_if_fail (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (stream->cur_seg_template)->SegmentTimeline == NULL, 0);
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  if (stream_period->duration != -1)
+    return gst_util_uint64_scale_ceil (stream_period->duration, 1,
+        gst_mpd_client2_get_segment_duration (client, stream, NULL));
+
+  return 0;
+}
+
+gboolean
+gst_mpd_client2_is_live (GstMPDClient2 * client)
+{
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  return client->mpd_root_node->type == GST_MPD_FILE_TYPE_DYNAMIC;
+}
+
+guint
+gst_mpd_client2_get_nb_active_stream (GstMPDClient2 * client)
+{
+  g_return_val_if_fail (client != NULL, 0);
+
+  return g_list_length (client->active_streams);
+}
+
+guint
+gst_mpd_client2_get_nb_adaptationSet (GstMPDClient2 * client)
+{
+  GstStreamPeriod *stream_period;
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, 0);
+  g_return_val_if_fail (stream_period->period != NULL, 0);
+
+  return g_list_length (stream_period->period->AdaptationSets);
+}
+
+GstActiveStream *
+gst_mpd_client2_get_active_stream_by_index (GstMPDClient2 * client,
+    guint stream_idx)
+{
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->active_streams != NULL, NULL);
+
+  return g_list_nth_data (client->active_streams, stream_idx);
+}
+
+gboolean
+gst_mpd_client2_active_stream_contains_subtitles (GstActiveStream * stream)
+{
+  const gchar *mimeType;
+  const gchar *adapt_set_codecs;
+  const gchar *rep_codecs;
+
+  mimeType =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->mimeType;
+  if (!mimeType)
+    mimeType =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->mimeType;
+
+  if (g_strcmp0 (mimeType, "application/ttml+xml") == 0 ||
+      g_strcmp0 (mimeType, "application/x-subtitle-vtt") == 0 ||
+      g_strcmp0 (mimeType, "text/vtt") == 0)
+    return TRUE;
+
+  adapt_set_codecs =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->codecs;
+  rep_codecs =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->codecs;
+
+  if (adapt_set_codecs) {
+    if (g_str_has_prefix (adapt_set_codecs, "stpp"))
+      return TRUE;
+    if (g_str_has_prefix (adapt_set_codecs, "wvtt"))
+      return TRUE;
+  }
+  if (rep_codecs) {
+    if (g_str_has_prefix (rep_codecs, "stpp"))
+      return TRUE;
+    if (g_str_has_prefix (rep_codecs, "wvtt"))
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+GstCaps *
+gst_mpd_client2_get_codec_caps (GstActiveStream * stream)
+{
+  GstCaps *ret = NULL;
+  GList *iter;
+  GstMPDAdaptationSetNode *adapt_set = stream->cur_adapt_set;
+
+  if (adapt_set == NULL) {
+    GST_WARNING ("No adaptation set => No caps");
+    return NULL;
+  }
+  /* The adaptation set may already have caps, in which case they are the
+   * superset of the possible caps of all representations (a representation's
+   * properties must not exceed those of its adaptation set) */
+
+  if (adapt_set->parent_instance.caps) {
+    ret = gst_caps_copy (adapt_set->parent_instance.caps);
+    GST_DEBUG ("Adaptation set caps %" GST_PTR_FORMAT, ret);
+    return ret;
+  }
+
+  /* Iterate over the current adaptation set representation */
+  for (iter = stream->cur_adapt_set->Representations; iter; iter = iter->next) {
+    GstMPDRepresentationBaseNode *rep =
+        (GstMPDRepresentationBaseNode *) iter->data;
+
+    if (rep->caps) {
+      GST_DEBUG ("Adding representation caps %" GST_PTR_FORMAT, rep->caps);
+      if (ret)
+        ret = gst_caps_merge (ret, gst_caps_ref (rep->caps));
+      else
+        ret = gst_caps_copy (rep->caps);
+    }
+  }
+
+  GST_DEBUG ("Merged caps %" GST_PTR_FORMAT, ret);
+  return ret;
+}
+
+GstCaps *
+gst_mpd_client2_get_stream_caps (GstActiveStream * stream)
+{
+  const gchar *mimeType, *caps_string;
+  GstCaps *ret = NULL;
+
+  if (stream == NULL || stream->cur_adapt_set == NULL
+      || stream->cur_representation == NULL)
+    return NULL;
+
+  mimeType =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->mimeType;
+  if (mimeType == NULL) {
+    mimeType =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->mimeType;
+  }
+
+  caps_string = gst_mpd_helper_mimetype_to_caps (mimeType);
+
+  if ((g_strcmp0 (caps_string, "application/mp4") == 0)
+      && gst_mpd_client2_active_stream_contains_subtitles (stream))
+    caps_string = "video/quicktime";
+
+  if (caps_string)
+    ret = gst_caps_from_string (caps_string);
+
+  return ret;
+}
+
+gboolean
+gst_mpd_client2_get_bitstream_switching_flag (GstActiveStream * stream)
+{
+  if (stream == NULL || stream->cur_adapt_set == NULL)
+    return FALSE;
+
+  return stream->cur_adapt_set->bitstreamSwitching;
+}
+
+guint
+gst_mpd_client2_get_video_stream_width (GstActiveStream * stream)
+{
+  guint width;
+
+  if (stream == NULL || stream->cur_adapt_set == NULL
+      || stream->cur_representation == NULL)
+    return 0;
+
+  width = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->width;
+  if (width == 0) {
+    width = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->width;
+  }
+
+  return width;
+}
+
+guint
+gst_mpd_client2_get_video_stream_height (GstActiveStream * stream)
+{
+  guint height;
+
+  if (stream == NULL || stream->cur_adapt_set == NULL
+      || stream->cur_representation == NULL)
+    return 0;
+
+  height =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_representation)->height;
+  if (height == 0) {
+    height = GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->height;
+  }
+
+  return height;
+}
+
+gboolean
+gst_mpd_client2_get_video_stream_framerate (GstActiveStream * stream,
+    gint * fps_num, gint * fps_den)
+{
+  if (stream == NULL)
+    return FALSE;
+
+  if (stream->cur_adapt_set &&
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->frameRate !=
+      NULL) {
+    *fps_num =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+        frameRate->num;
+    *fps_den =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+        frameRate->den;
+    return TRUE;
+  }
+
+  if (stream->cur_adapt_set &&
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->maxFrameRate !=
+      NULL) {
+    *fps_num =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+        maxFrameRate->num;
+    *fps_den =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->cur_adapt_set)->
+        maxFrameRate->den;
+    return TRUE;
+  }
+
+  if (stream->cur_representation &&
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->
+          cur_representation)->frameRate != NULL) {
+    *fps_num =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->
+        cur_representation)->frameRate->num;
+    *fps_den =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->
+        cur_representation)->frameRate->den;
+    return TRUE;
+  }
+
+  if (stream->cur_representation &&
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->
+          cur_representation)->maxFrameRate != NULL) {
+    *fps_num =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->
+        cur_representation)->maxFrameRate->num;
+    *fps_den =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->
+        cur_representation)->maxFrameRate->den;
+    return TRUE;
+  }
+
+  return FALSE;
+}
+
+guint
+gst_mpd_client2_get_audio_stream_rate (GstActiveStream * stream)
+{
+  const gchar *rate;
+
+  if (stream == NULL || stream->cur_adapt_set == NULL
+      || stream->cur_representation == NULL)
+    return 0;
+
+  rate =
+      GST_MPD_REPRESENTATION_BASE_NODE (stream->
+      cur_representation)->audioSamplingRate;
+  if (rate == NULL) {
+    rate =
+        GST_MPD_REPRESENTATION_BASE_NODE (stream->
+        cur_adapt_set)->audioSamplingRate;
+  }
+
+  return rate ? atoi (rate) : 0;
+}
+
+guint
+gst_mpd_client2_get_audio_stream_num_channels (GstActiveStream * stream)
+{
+  if (stream == NULL || stream->cur_adapt_set == NULL
+      || stream->cur_representation == NULL)
+    return 0;
+  /* TODO: here we have to parse the AudioChannelConfiguration descriptors */
+  return 0;
+}
+
+guint
+gst_mpd_client2_get_list_and_nb_of_audio_language (GstMPDClient2 * client,
+    GList ** lang)
+{
+  GstStreamPeriod *stream_period;
+  GstMPDAdaptationSetNode *adapt_set;
+  GList *adaptation_sets, *list;
+  const gchar *this_mimeType = "audio";
+  gchar *mimeType = NULL;
+  guint nb_adaptation_set = 0;
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  g_return_val_if_fail (stream_period != NULL, 0);
+  g_return_val_if_fail (stream_period->period != NULL, 0);
+
+  adaptation_sets =
+      gst_mpd_client2_get_adaptation_sets_for_period (client, stream_period);
+  for (list = adaptation_sets; list; list = g_list_next (list)) {
+    adapt_set = (GstMPDAdaptationSetNode *) list->data;
+    if (adapt_set && adapt_set->lang) {
+      gchar *this_lang = adapt_set->lang;
+      GstMPDRepresentationNode *rep;
+      rep =
+          gst_mpd_client2_get_lowest_representation
+          (adapt_set->Representations);
+      mimeType = NULL;
+      if (GST_MPD_REPRESENTATION_BASE_NODE (rep))
+        mimeType = GST_MPD_REPRESENTATION_BASE_NODE (rep)->mimeType;
+      if (!mimeType && GST_MPD_REPRESENTATION_BASE_NODE (adapt_set)) {
+        mimeType = GST_MPD_REPRESENTATION_BASE_NODE (adapt_set)->mimeType;
+      }
+
+      if (gst_mpd_helper_strncmp_ext (mimeType, this_mimeType) == 0) {
+        nb_adaptation_set++;
+        *lang = g_list_append (*lang, this_lang);
+      }
+    }
+  }
+
+  return nb_adaptation_set;
+}
+
+
+GstDateTime *
+gst_mpd_client2_get_next_segment_availability_start_time (GstMPDClient2 *
+    client, GstActiveStream * stream)
+{
+  GstDateTime *availability_start_time, *rv;
+  gint seg_idx;
+  GstMediaSegment *segment;
+  GstClockTime segmentEndTime;
+  const GstStreamPeriod *stream_period;
+  GstClockTime period_start = 0;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (stream != NULL, NULL);
+
+  stream_period = gst_mpd_client2_get_stream_period (client);
+  if (stream_period && stream_period->period) {
+    period_start = stream_period->start;
+  }
+
+  seg_idx = stream->segment_index;
+
+  if (stream->segments && seg_idx < stream->segments->len) {
+    segment = g_ptr_array_index (stream->segments, seg_idx);
+
+    if (segment->repeat >= 0) {
+      segmentEndTime = segment->start + (stream->segment_repeat_index + 1) *
+          segment->duration;
+    } else if (seg_idx < stream->segments->len - 1) {
+      const GstMediaSegment *next_segment =
+          g_ptr_array_index (stream->segments, seg_idx + 1);
+      segmentEndTime = next_segment->start;
+    } else {
+      g_return_val_if_fail (stream_period != NULL, NULL);
+      segmentEndTime = period_start + stream_period->duration;
+    }
+  } else {
+    GstClockTime seg_duration;
+    seg_duration = gst_mpd_client2_get_segment_duration (client, stream, NULL);
+    if (seg_duration == 0)
+      return NULL;
+    segmentEndTime = period_start + (1 + seg_idx) * seg_duration;
+  }
+
+  availability_start_time =
+      gst_mpd_client2_get_availability_start_time (client);
+  if (availability_start_time == NULL) {
+    GST_WARNING_OBJECT (client, "Failed to get availability_start_time");
+    return NULL;
+  }
+
+  rv = gst_mpd_client2_add_time_difference (availability_start_time,
+      segmentEndTime);
+  gst_date_time_unref (availability_start_time);
+  if (rv == NULL) {
+    GST_WARNING_OBJECT (client, "Failed to offset availability_start_time");
+    return NULL;
+  }
+
+  return rv;
+}
+
+gboolean
+gst_mpd_client2_seek_to_time (GstMPDClient2 * client, GDateTime * time)
+{
+  GDateTime *start;
+  GTimeSpan ts_microseconds;
+  GstClockTime ts;
+  gboolean ret = TRUE;
+  GList *stream;
+
+  g_return_val_if_fail (gst_mpd_client2_is_live (client), FALSE);
+  g_return_val_if_fail (client->mpd_root_node->availabilityStartTime != NULL,
+      FALSE);
+
+  start =
+      gst_date_time_to_g_date_time (client->mpd_root_node->
+      availabilityStartTime);
+
+  ts_microseconds = g_date_time_difference (time, start);
+  g_date_time_unref (start);
+
+  /* Clamp to availability start time, otherwise calculations wrap around */
+  if (ts_microseconds < 0)
+    ts_microseconds = 0;
+
+  ts = ts_microseconds * GST_USECOND;
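+  /* Seek every active stream; the bitwise AND (rather than &&) avoids
+   * short-circuiting, so all streams are repositioned even if one seek
+   * fails, and the overall result is FALSE if any of them failed. */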
+  for (stream = client->active_streams; stream; stream = g_list_next (stream)) {
+    ret =
+        ret & gst_mpd_client2_stream_seek (client, stream->data, TRUE, 0, ts,
+        NULL);
+  }
+  return ret;
+}
+
+gboolean
+gst_mpd_client2_has_isoff_ondemand_profile (GstMPDClient2 * client)
+{
+  return client->profile_isoff_ondemand;
+}
+
+/**
+ * gst_mpd_client2_parse_default_presentation_delay:
+ * @client: #GstMPDClient2 that has a parsed manifest
+ * @default_presentation_delay: A string that specifies a time period
+ * in fragments (e.g. "5 f"), seconds ("12 s") or milliseconds
+ * ("12000 ms")
+ * Returns: the parsed string in milliseconds
+ *
+ * Since: 1.6
+ */
+gint64
+gst_mpd_client2_parse_default_presentation_delay (GstMPDClient2 * client,
+    const gchar * default_presentation_delay)
+{
+  gint64 value;
+  char *endptr = NULL;
+
+  g_return_val_if_fail (client != NULL, 0);
+  g_return_val_if_fail (default_presentation_delay != NULL, 0);
+  value = strtol (default_presentation_delay, &endptr, 10);
+  if (endptr == default_presentation_delay || value == 0) {
+    return 0;
+  }
+  while (*endptr == ' ')
+    endptr++;
+  if (*endptr == 's' || *endptr == 'S') {
+    value *= 1000;              /* convert to ms */
+  } else if (*endptr == 'f' || *endptr == 'F') {
+    gint64 segment_duration;
+    g_assert (client->mpd_root_node != NULL);
+    segment_duration = client->mpd_root_node->maxSegmentDuration;
+    value *= segment_duration;
+  } else if (*endptr != 'm' && *endptr != 'M') {
+    GST_ERROR ("Unable to parse default presentation delay: %s",
+        default_presentation_delay);
+    value = 0;
+  }
+  return value;
+}
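+
+/* Illustrative values (comment only): assuming the manifest advertises a
+ * maxSegmentDuration of 2000 ms, "5 f" parses to 5 * 2000 = 10000 ms,
+ * "12 s" parses to 12000 ms and "12000 ms" parses to 12000 ms; an
+ * unparseable string yields 0. */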
+
+GstClockTime
+gst_mpd_client2_get_maximum_segment_duration (GstMPDClient2 * client)
+{
+  GstClockTime ret = GST_CLOCK_TIME_NONE, dur;
+  GList *stream;
+
+  g_return_val_if_fail (client != NULL, GST_CLOCK_TIME_NONE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, GST_CLOCK_TIME_NONE);
+
+  if (client->mpd_root_node->maxSegmentDuration != GST_MPD_DURATION_NONE) {
+    return client->mpd_root_node->maxSegmentDuration * GST_MSECOND;
+  }
+
+  /* According to the DASH specification, if maxSegmentDuration is not present:
+     "If not present, then the maximum Segment duration shall be the maximum
+     duration of any Segment documented in this MPD"
+   */
+  for (stream = client->active_streams; stream; stream = g_list_next (stream)) {
+    dur = gst_mpd_client2_get_segment_duration (client, stream->data, NULL);
+    if (dur != GST_CLOCK_TIME_NONE && (dur > ret || ret == GST_CLOCK_TIME_NONE)) {
+      ret = dur;
+    }
+  }
+  return ret;
+}
+
+guint
+gst_mpd_client2_get_period_index_at_time (GstMPDClient2 * client,
+    GstDateTime * time)
+{
+  GList *iter;
+  guint period_idx = G_MAXUINT;
+  guint idx;
+  gint64 time_offset;
+  GstDateTime *avail_start =
+      gst_mpd_client2_get_availability_start_time (client);
+  GstStreamPeriod *stream_period;
+
+  if (avail_start == NULL)
+    return 0;
+
+  time_offset = gst_mpd_client2_calculate_time_difference (avail_start, time);
+  gst_date_time_unref (avail_start);
+
+  if (time_offset < 0)
+    return 0;
+
+  if (!gst_mpd_client2_setup_media_presentation (client, time_offset, -1, NULL))
+    return 0;
+
+  for (idx = 0, iter = client->periods; iter; idx++, iter = g_list_next (iter)) {
+    stream_period = iter->data;
+    if (stream_period->start <= time_offset
+        && (!GST_CLOCK_TIME_IS_VALID (stream_period->duration)
+            || stream_period->start + stream_period->duration > time_offset)) {
+      period_idx = idx;
+      break;
+    }
+  }
+
+  return period_idx;
+}
+
+/* add or set node methods */
+
+gboolean
+gst_mpd_client2_set_root_node (GstMPDClient2 * client,
+    const gchar * property_name, ...)
+{
+  va_list myargs;
+  g_return_val_if_fail (client != NULL, FALSE);
+
+  if (!client->mpd_root_node)
+    client->mpd_root_node = gst_mpd_root_node_new ();
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (client->mpd_root_node), property_name, myargs);
+  va_end (myargs);
+
+  return TRUE;
+}
+
+gboolean
+gst_mpd_client2_add_baseurl_node (GstMPDClient2 * client,
+    const gchar * property_name, ...)
+{
+  GstMPDBaseURLNode *baseurl_node = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  va_start (myargs, property_name);
+
+  baseurl_node = gst_mpd_baseurl_node_new ();
+  g_object_set_valist (G_OBJECT (baseurl_node), property_name, myargs);
+  client->mpd_root_node->BaseURLs =
+      g_list_append (client->mpd_root_node->BaseURLs, baseurl_node);
+
+  va_end (myargs);
+  return TRUE;
+}
+
+/* returns a period id */
+gchar *
+gst_mpd_client2_set_period_node (GstMPDClient2 * client,
+    gchar * period_id, const gchar * property_name, ...)
+{
+  GstMPDPeriodNode *period_node = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+
+  period_node =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  if (!period_node) {
+    period_node = gst_mpd_period_node_new ();
+    if (period_id)
+      period_node->id = g_strdup (period_id);
+    else
+      period_node->id =
+          _generate_new_string_id (client->mpd_root_node->Periods,
+          "period_%.2d", gst_mpd_client2_get_period_with_id);
+    client->mpd_root_node->Periods =
+        g_list_append (client->mpd_root_node->Periods, period_node);
+  }
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (period_node), property_name, myargs);
+  va_end (myargs);
+
+  return period_node->id;
+}
+
+/* returns an adaptation set id */
+guint
+gst_mpd_client2_set_adaptation_set_node (GstMPDClient2 * client,
+    gchar * period_id, guint adaptation_set_id, const gchar * property_name,
+    ...)
+{
+  GstMPDAdaptationSetNode *adap_node = NULL;
+  GstMPDPeriodNode *period_node = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, 0);
+  g_return_val_if_fail (client->mpd_root_node != NULL, 0);
+
+  period_node =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  g_return_val_if_fail (period_node != NULL, 0);
+  adap_node =
+      GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client2_get_adaptation_set_with_id
+      (period_node->AdaptationSets, adaptation_set_id));
+  if (!adap_node) {
+    adap_node = gst_mpd_adaptation_set_node_new ();
+    if (adaptation_set_id)
+      adap_node->id = adaptation_set_id;
+    else
+      adap_node->id =
+          _generate_new_id (period_node->AdaptationSets,
+          gst_mpd_client2_get_adaptation_set_with_id);
+    GST_DEBUG_OBJECT (client, "Add a new adaptation set with id %d",
+        adap_node->id);
+    period_node->AdaptationSets =
+        g_list_append (period_node->AdaptationSets, adap_node);
+  }
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (adap_node), property_name, myargs);
+  va_end (myargs);
+
+  return adap_node->id;
+}
+
+/* returns a representation id */
+gchar *
+gst_mpd_client2_set_representation_node (GstMPDClient2 * client,
+    gchar * period_id, guint adaptation_set_id, gchar * representation_id,
+    const gchar * property_name, ...)
+{
+  GstMPDRepresentationNode *rep_node = NULL;
+  GstMPDAdaptationSetNode *adap_set_node = NULL;
+  GstMPDPeriodNode *period_node = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, NULL);
+  g_return_val_if_fail (client->mpd_root_node != NULL, NULL);
+
+  period_node =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  adap_set_node =
+      GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client2_get_adaptation_set_with_id
+      (period_node->AdaptationSets, adaptation_set_id));
+  g_return_val_if_fail (adap_set_node != NULL, NULL);
+  rep_node =
+      GST_MPD_REPRESENTATION_NODE (gst_mpd_client2_get_representation_with_id
+      (adap_set_node->Representations, representation_id));
+  if (!rep_node) {
+    rep_node = gst_mpd_representation_node_new ();
+    if (representation_id)
+      rep_node->id = g_strdup (representation_id);
+    else
+      rep_node->id =
+          _generate_new_string_id (adap_set_node->Representations,
+          "representation_%.2d", gst_mpd_client2_get_representation_with_id);
+    GST_DEBUG_OBJECT (client, "Add a new representation with id %s",
+        rep_node->id);
+    adap_set_node->Representations =
+        g_list_append (adap_set_node->Representations, rep_node);
+  }
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (rep_node), property_name, myargs);
+  va_end (myargs);
+
+  return rep_node->id;
+}
+
+/* add/set a segment list node */
+gboolean
+gst_mpd_client2_set_segment_list (GstMPDClient2 * client,
+    gchar * period_id, guint adap_set_id, gchar * rep_id,
+    const gchar * property_name, ...)
+{
+  GstMPDRepresentationNode *representation = NULL;
+  GstMPDAdaptationSetNode *adaptation_set = NULL;
+  GstMPDPeriodNode *period = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  period =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  adaptation_set =
+      GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client2_get_adaptation_set_with_id
+      (period->AdaptationSets, adap_set_id));
+  g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+  representation =
+      GST_MPD_REPRESENTATION_NODE (gst_mpd_client2_get_representation_with_id
+      (adaptation_set->Representations, rep_id));
+  if (!representation->SegmentList) {
+    representation->SegmentList = gst_mpd_segment_list_node_new ();
+  }
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (representation->SegmentList), property_name,
+      myargs);
+  va_end (myargs);
+
+  return TRUE;
+}
+
+/* add/set a segment template node */
+gboolean
+gst_mpd_client2_set_segment_template (GstMPDClient2 * client,
+    gchar * period_id, guint adap_set_id, gchar * rep_id,
+    const gchar * property_name, ...)
+{
+  GstMPDRepresentationNode *representation = NULL;
+  GstMPDAdaptationSetNode *adaptation_set = NULL;
+  GstMPDPeriodNode *period = NULL;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  period =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  adaptation_set =
+      GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client2_get_adaptation_set_with_id
+      (period->AdaptationSets, adap_set_id));
+  g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+  representation =
+      GST_MPD_REPRESENTATION_NODE (gst_mpd_client2_get_representation_with_id
+      (adaptation_set->Representations, rep_id));
+  if (!representation->SegmentTemplate) {
+    representation->SegmentTemplate = gst_mpd_segment_template_node_new ();
+  }
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (representation->SegmentTemplate),
+      property_name, myargs);
+  va_end (myargs);
+
+  return TRUE;
+}
+
+/* add a SegmentURL node to a SegmentList node */
+gboolean
+gst_mpd_client2_add_segment_url (GstMPDClient2 * client,
+    gchar * period_id, guint adap_set_id, gchar * rep_id,
+    const gchar * property_name, ...)
+{
+  GstMPDRepresentationNode *representation = NULL;
+  GstMPDAdaptationSetNode *adaptation_set = NULL;
+  GstMPDPeriodNode *period = NULL;
+  GstMPDSegmentURLNode *segment_url = NULL;
+  guint64 media_presentation_duration = 0;
+  va_list myargs;
+
+  g_return_val_if_fail (client != NULL, FALSE);
+  g_return_val_if_fail (client->mpd_root_node != NULL, FALSE);
+
+  period =
+      GST_MPD_PERIOD_NODE (gst_mpd_client2_get_period_with_id
+      (client->mpd_root_node->Periods, period_id));
+  adaptation_set =
+      GST_MPD_ADAPTATION_SET_NODE (gst_mpd_client2_get_adaptation_set_with_id
+      (period->AdaptationSets, adap_set_id));
+  g_return_val_if_fail (adaptation_set != NULL, FALSE);
+
+  representation =
+      GST_MPD_REPRESENTATION_NODE (gst_mpd_client2_get_representation_with_id
+      (adaptation_set->Representations, rep_id));
+
+  if (!representation->SegmentList) {
+    representation->SegmentList = gst_mpd_segment_list_node_new ();
+  }
+
+  segment_url = gst_mpd_segment_url_node_new ();
+
+  va_start (myargs, property_name);
+  g_object_set_valist (G_OBJECT (segment_url), property_name, myargs);
+  va_end (myargs);
+
+  gst_mpd_segment_list_node_add_segment (representation->SegmentList,
+      segment_url);
+
+  /* Update the media presentation duration to account for the duration of
+   * the segment that was just added */
+  g_object_get (client->mpd_root_node, "media-presentation-duration",
+      &media_presentation_duration, NULL);
+  media_presentation_duration +=
+      GST_MPD_MULT_SEGMENT_BASE_NODE (representation->SegmentList)->duration;
+  g_object_set (client->mpd_root_node, "media-presentation-duration",
+      media_presentation_duration, NULL);
+
+  return TRUE;
+}
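+
+/* Illustrative sketch (comment only) of the add/set helpers above: they all
+ * take g_object_set()-style "property", value pairs terminated by NULL.
+ * "media-presentation-duration" is the only property name taken from this
+ * file; any other property depends on the respective node class:
+ *
+ *   GstMPDClient2 *client = gst_mpd_client2_new ();
+ *
+ *   gst_mpd_client2_set_root_node (client,
+ *       "media-presentation-duration", G_GUINT64_CONSTANT (0), NULL);
+ */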
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdclient.h
new file mode 100644 (file)
index 0000000..c4fa796
--- /dev/null
@@ -0,0 +1,191 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GST_MPDCLIENT_H__
+#define __GST_MPDCLIENT_H__
+
+#include "gstmpdparser.h"
+#include "downloadhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_CLIENT gst_mpd_client2_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDClient2, gst_mpd_client2, GST, MPD_CLIENT, GstObject)
+
+struct _GstMPDClient2
+{
+  GstObject     parent_instance;
+  GstMPDRootNode *mpd_root_node;              /* mpd root node */
+
+  GList *periods;                             /* list of GstStreamPeriod */
+  guint period_idx;                           /* index of current Period */
+
+  GList *active_streams;                      /* list of GstActiveStream */
+
+  guint update_failed_count;
+  gchar *mpd_uri;                             /* manifest file URI */
+  gchar *mpd_base_uri;                        /* base URI for resolving relative URIs.
+                                               * this will be different for redirects */
+
+  /* profiles */
+  gboolean profile_isoff_ondemand;
+
+  DownloadHelper *download_helper;
+};
+
+/* Basic initialization/deinitialization functions */
+
+GstMPDClient2 *gst_mpd_client2_new (void);
+GstMPDClient2 *gst_mpd_client2_new_static (void);
+
+void gst_mpd_client2_active_streams_free (GstMPDClient2 * client);
+void gst_mpd_client2_free (GstMPDClient2 * client);
+
+/* main mpd parsing methods from xml data */
+gboolean gst_mpd_client2_parse (GstMPDClient2 * client, const gchar * data, gint size);
+
+/* xml generator */
+gboolean gst_mpd_client2_get_xml_content (GstMPDClient2 * client, gchar ** data, gint * size);
+
+void gst_mpd_client2_set_download_helper (GstMPDClient2 * client, DownloadHelper *dh);
+void  gst_mpd_client2_check_profiles (GstMPDClient2 * client);
+void gst_mpd_client2_fetch_on_load_external_resources (GstMPDClient2 * client);
+
+/* Streaming management */
+gboolean gst_mpd_client2_setup_media_presentation (GstMPDClient2 *client, GstClockTime time, gint period_index, const gchar *period_id);
+gboolean gst_mpd_client2_setup_streaming (GstMPDClient2 * client, GstMPDAdaptationSetNode * adapt_set);
+gboolean gst_mpd_client2_setup_representation (GstMPDClient2 *client, GstActiveStream *stream, GstMPDRepresentationNode *representation);
+
+GstClockTime gst_mpd_client2_get_next_fragment_duration (GstMPDClient2 * client, GstActiveStream * stream);
+GstClockTime gst_mpd_client2_get_media_presentation_duration (GstMPDClient2 *client);
+GstClockTime gst_mpd_client2_get_maximum_segment_duration (GstMPDClient2 * client);
+gboolean gst_mpd_client2_get_last_fragment_timestamp_end (GstMPDClient2 * client, guint stream_idx, GstClockTime * ts);
+gboolean gst_mpd_client2_get_next_fragment_timestamp (GstMPDClient2 * client, guint stream_idx, GstClockTime * ts);
+gboolean gst_mpd_client2_get_next_fragment (GstMPDClient2 *client, guint indexStream, GstMediaFragmentInfo * fragment);
+gboolean gst_mpd_client2_get_next_header (GstMPDClient2 *client, gchar **uri, guint stream_idx, gint64 * range_start, gint64 * range_end);
+gboolean gst_mpd_client2_get_next_header_index (GstMPDClient2 *client, gchar **uri, guint stream_idx, gint64 * range_start, gint64 * range_end);
+gboolean gst_mpd_client2_is_live (GstMPDClient2 * client);
+gboolean gst_mpd_client2_stream_seek (GstMPDClient2 * client, GstActiveStream * stream, gboolean forward, GstSeekFlags flags, GstClockTime ts, GstClockTime * final_ts);
+gboolean gst_mpd_client2_seek_to_time (GstMPDClient2 * client, GDateTime * time);
+GstClockTime gst_mpd_client2_get_stream_presentation_offset (GstMPDClient2 *client, guint stream_idx);
+gchar** gst_mpd_client2_get_utc_timing_sources (GstMPDClient2 *client, guint methods, GstMPDUTCTimingType *selected_method);
+GstClockTime gst_mpd_client2_get_period_start_time (GstMPDClient2 *client);
+
+GstCaps *gst_mpd_client2_get_codec_caps (GstActiveStream *stream);
+
+/* Period selection */
+guint gst_mpd_client2_get_period_index_at_time (GstMPDClient2 * client, GstDateTime * time);
+gboolean gst_mpd_client2_set_period_index (GstMPDClient2 *client, guint period_idx);
+gboolean gst_mpd_client2_set_period_id (GstMPDClient2 *client, const gchar * period_id);
+guint gst_mpd_client2_get_period_index (GstMPDClient2 *client);
+const gchar *gst_mpd_client2_get_period_id (GstMPDClient2 *client);
+gboolean gst_mpd_client2_has_next_period (GstMPDClient2 *client);
+gboolean gst_mpd_client2_has_previous_period (GstMPDClient2 * client);
+
+/* Representation selection */
+gint gst_mpd_client2_get_rep_idx_with_max_bandwidth (GList *Representations, gint64 max_bandwidth, gint max_video_width, gint max_video_height, gint max_video_framerate_n, gint max_video_framerate_d);
+gint gst_mpd_client2_get_rep_idx_with_min_bandwidth (GList * Representations);
+
+GstDateTime *
+gst_mpd_client2_get_availability_start_time (GstMPDClient2 * client);
+
+/* URL management */
+const gchar *gst_mpd_client2_get_baseURL (GstMPDClient2 *client, guint indexStream);
+gchar *gst_mpd_client2_parse_baseURL (GstMPDClient2 * client, GstActiveStream * stream, gchar ** query);
+
+/* Active stream */
+guint gst_mpd_client2_get_nb_active_stream (GstMPDClient2 *client);
+GstActiveStream *gst_mpd_client2_get_active_stream_by_index (GstMPDClient2 *client, guint stream_idx);
+gboolean gst_mpd_client2_active_stream_contains_subtitles (GstActiveStream * stream);
+
+/* AdaptationSet */
+guint gst_mpd_client2_get_nb_adaptationSet (GstMPDClient2 *client);
+GList * gst_mpd_client2_get_adaptation_sets (GstMPDClient2 * client);
+
+/* Segment */
+gboolean gst_mpd_client2_has_next_segment (GstMPDClient2 * client, GstActiveStream * stream, gboolean forward);
+GstFlowReturn gst_mpd_client2_advance_segment (GstMPDClient2 * client, GstActiveStream * stream, gboolean forward);
+void gst_mpd_client2_seek_to_first_segment (GstMPDClient2 * client);
+GstDateTime *gst_mpd_client2_get_next_segment_availability_start_time (GstMPDClient2 * client, GstActiveStream * stream);
+
+/* Get audio/video stream parameters (caps, width, height, rate, number of channels) */
+GstCaps * gst_mpd_client2_get_stream_caps (GstActiveStream * stream);
+gboolean gst_mpd_client2_get_bitstream_switching_flag (GstActiveStream * stream);
+guint gst_mpd_client2_get_video_stream_width (GstActiveStream * stream);
+guint gst_mpd_client2_get_video_stream_height (GstActiveStream * stream);
+gboolean gst_mpd_client2_get_video_stream_framerate (GstActiveStream * stream, gint * fps_num, gint * fps_den);
+guint gst_mpd_client2_get_audio_stream_rate (GstActiveStream * stream);
+guint gst_mpd_client2_get_audio_stream_num_channels (GstActiveStream * stream);
+
+/* Multi-language support */
+guint gst_mpd_client2_get_list_and_nb_of_audio_language (GstMPDClient2 *client, GList **lang);
+
+GstClockTimeDiff gst_mpd_client2_calculate_time_difference (const GstDateTime * t1, const GstDateTime * t2);
+GstDateTime *gst_mpd_client2_add_time_difference (GstDateTime * t1, GstClockTimeDiff diff);
+gint64 gst_mpd_client2_parse_default_presentation_delay(GstMPDClient2 * client, const gchar * default_presentation_delay);
+
+/* profiles */
+gboolean gst_mpd_client2_has_isoff_ondemand_profile (GstMPDClient2 *client);
+
+/* add/set node methods */
+gboolean gst_mpd_client2_set_root_node (GstMPDClient2 * client,
+                                       const gchar * property_name,
+                                       ...);
+gchar * gst_mpd_client2_set_period_node (GstMPDClient2 * client,
+                                        gchar * period_id,
+                                        const gchar * property_name,
+                                        ...);
+guint gst_mpd_client2_set_adaptation_set_node (GstMPDClient2 * client,
+                                              gchar * period_id,
+                                              guint adap_set_id,
+                                              const gchar * property_name,
+                                              ...);
+gchar * gst_mpd_client2_set_representation_node (GstMPDClient2 * client,
+                                                gchar * period_id,
+                                                guint adap_set_id,
+                                                gchar * rep_id,
+                                                const gchar * property_name,
+                                                ...);
+gboolean gst_mpd_client2_set_segment_list (GstMPDClient2 * client,
+                                          gchar * period_id,
+                                          guint adap_set_id,
+                                          gchar * rep_id,
+                                          const gchar * property_name,
+                                          ...);
+gboolean gst_mpd_client2_set_segment_template (GstMPDClient2 * client,
+                                              gchar * period_id,
+                                              guint adap_set_id,
+                                              gchar * rep_id,
+                                              const gchar * property_name,
+                                              ...);
+
+/* create a new node */
+gboolean gst_mpd_client2_add_baseurl_node (GstMPDClient2 * client,
+                                          const gchar * property_name,
+                                          ...);
+gboolean gst_mpd_client2_add_segment_url (GstMPDClient2 * client,
+                                         gchar * period_id,
+                                         guint adap_set_id,
+                                         gchar * rep_id,
+                                         const gchar * property_name,
+                                         ...);
+G_END_DECLS
+
+#endif /* __GST_MPDCLIENT_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.c
new file mode 100644 (file)
index 0000000..779ba27
--- /dev/null
@@ -0,0 +1,120 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdcontentcomponentnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDContentComponentNode2, gst_mpd_content_component_node,
+    GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_content_component_node_finalize (GObject * object)
+{
+  GstMPDContentComponentNode *self = GST_MPD_CONTENT_COMPONENT_NODE (object);
+
+  if (self->lang)
+    xmlFree (self->lang);
+  if (self->contentType)
+    xmlFree (self->contentType);
+  g_slice_free (GstXMLRatio, self->par);
+  g_list_free_full (self->Accessibility,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Role,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Rating,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->Viewpoint,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+
+  G_OBJECT_CLASS (gst_mpd_content_component_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_content_component_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr content_component_xml_node = NULL;
+  GstMPDContentComponentNode *self = GST_MPD_CONTENT_COMPONENT_NODE (node);
+  content_component_xml_node =
+      xmlNewNode (NULL, (xmlChar *) "ContentComponent");
+
+  gst_xml_helper_set_prop_uint (content_component_xml_node, "id", self->id);
+  gst_xml_helper_set_prop_string (content_component_xml_node, "lang",
+      self->lang);
+  gst_xml_helper_set_prop_string (content_component_xml_node, "contentType",
+      self->contentType);
+  gst_xml_helper_set_prop_ratio (content_component_xml_node, "par", self->par);
+
+  g_list_foreach (self->Accessibility, gst_mpd_node_get_list_item,
+      content_component_xml_node);
+  g_list_foreach (self->Role, gst_mpd_node_get_list_item,
+      content_component_xml_node);
+  g_list_foreach (self->Rating, gst_mpd_node_get_list_item,
+      content_component_xml_node);
+  g_list_foreach (self->Viewpoint, gst_mpd_node_get_list_item,
+      content_component_xml_node);
+
+  return content_component_xml_node;
+}
+
+static void
+gst_mpd_content_component_node_class_init (GstMPDContentComponentNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_content_component_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_content_component_get_xml_node;
+}
+
+static void
+gst_mpd_content_component_node_init (GstMPDContentComponentNode * self)
+{
+  self->id = 0;
+  self->lang = NULL;
+  self->contentType = NULL;
+  self->par = NULL;
+  self->Accessibility = NULL;
+  self->Role = NULL;
+  self->Rating = NULL;
+  self->Viewpoint = NULL;
+}
+
+GstMPDContentComponentNode *
+gst_mpd_content_component_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_CONTENT_COMPONENT_NODE, NULL);
+}
+
+void
+gst_mpd_content_component_node_free (GstMPDContentComponentNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.h
new file mode 100644 (file)
index 0000000..d7f367c
--- /dev/null
@@ -0,0 +1,57 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDCONTENTCOMPONENTNODE_H__
+#define __GSTMPDCONTENTCOMPONENTNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_CONTENT_COMPONENT_NODE gst_mpd_content_component_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDContentComponentNode2, gst_mpd_content_component_node, GST, MPD_CONTENT_COMPONENT_NODE, GstMPDNode)
+
+typedef GstMPDContentComponentNode2 GstMPDContentComponentNode;
+typedef GstMPDContentComponentNode2Class GstMPDContentComponentNodeClass;
+
+struct _GstMPDContentComponentNode2
+{
+  GstObject parent_instance;
+  guint id;
+  gchar *lang; /* LangVectorType RFC 5646 */
+  gchar *contentType;
+  GstXMLRatio *par;
+  /* list of Accessibility DescriptorType nodes */
+  GList *Accessibility;
+  /* list of Role DescriptorType nodes */
+  GList *Role;
+  /* list of Rating DescriptorType nodes */
+  GList *Rating;
+  /* list of Viewpoint DescriptorType nodes */
+  GList *Viewpoint;
+};
+
+GstMPDContentComponentNode * gst_mpd_content_component_node_new (void);
+void gst_mpd_content_component_node_free (GstMPDContentComponentNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDCONTENTCOMPONENTNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.c
new file mode 100644 (file)
index 0000000..2541b06
--- /dev/null
@@ -0,0 +1,99 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpddescriptortypenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDDescriptorTypeNode2, gst_mpd_descriptor_type_node,
+    GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_descriptor_type_node_finalize (GObject * object)
+{
+  GstMPDDescriptorTypeNode *self = GST_MPD_DESCRIPTOR_TYPE_NODE (object);
+
+  if (self->schemeIdUri)
+    xmlFree (self->schemeIdUri);
+  if (self->value)
+    xmlFree (self->value);
+  g_free (self->node_name);
+
+  G_OBJECT_CLASS (gst_mpd_descriptor_type_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_descriptor_type_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr descriptor_type_xml_node = NULL;
+  GstMPDDescriptorTypeNode *self = GST_MPD_DESCRIPTOR_TYPE_NODE (node);
+
+  descriptor_type_xml_node = xmlNewNode (NULL, (xmlChar *) self->node_name);
+
+  gst_xml_helper_set_prop_string (descriptor_type_xml_node, "schemeIdUri",
+      self->schemeIdUri);
+
+  gst_xml_helper_set_prop_string (descriptor_type_xml_node, "value",
+      self->value);
+
+  return descriptor_type_xml_node;
+}
+
+static void
+gst_mpd_descriptor_type_node_class_init (GstMPDDescriptorTypeNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_descriptor_type_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_descriptor_type_get_xml_node;
+}
+
+static void
+gst_mpd_descriptor_type_node_init (GstMPDDescriptorTypeNode * self)
+{
+  self->schemeIdUri = NULL;
+  self->value = NULL;
+}
+
+GstMPDDescriptorTypeNode *
+gst_mpd_descriptor_type_node_new (const gchar * name)
+{
+  GstMPDDescriptorTypeNode *self =
+      g_object_new (GST_TYPE_MPD_DESCRIPTOR_TYPE_NODE, NULL);
+  self->node_name = g_strdup (name);
+  return self;
+}
+
+void
+gst_mpd_descriptor_type_node_free (GstMPDDescriptorTypeNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpddescriptortypenode.h
new file mode 100644 (file)
index 0000000..a738108
--- /dev/null
@@ -0,0 +1,48 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDDESCRIPTORTYPENODE_H__
+#define __GSTMPDDESCRIPTORTYPENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_DESCRIPTOR_TYPE_NODE gst_mpd_descriptor_type_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDDescriptorTypeNode2, gst_mpd_descriptor_type_node, GST, MPD_DESCRIPTOR_TYPE_NODE, GstMPDNode)
+
+typedef GstMPDDescriptorTypeNode2 GstMPDDescriptorTypeNode;
+typedef GstMPDDescriptorTypeNode2Class GstMPDDescriptorTypeNodeClass;
+
+struct _GstMPDDescriptorTypeNode2
+{
+  GstObject     parent_instance;
+  gchar *node_name;
+  gchar *schemeIdUri;
+  gchar *value;
+};
+
+GstMPDDescriptorTypeNode * gst_mpd_descriptor_type_node_new (const gchar* name);
+void gst_mpd_descriptor_type_node_free (GstMPDDescriptorTypeNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDDESCRIPTORTYPENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.c
new file mode 100644 (file)
index 0000000..85a3002
--- /dev/null
@@ -0,0 +1,214 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+
+#include "gstmpdhelper.h"
+#include "gstmpdbaseurlnode.h"
+
+#include <gst/pbutils/pbutils.h>
+
+#define GST_CAT_DEFAULT gst_dash_demux2_debug
+
+gboolean
+gst_mpd_helper_get_mpd_type (xmlNode * a_node,
+    const gchar * property_name, GstMPDFileType * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  *property_value = GST_MPD_FILE_TYPE_STATIC;   /* default */
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (xmlStrcmp (prop_string, (xmlChar *) "OnDemand") == 0
+        || xmlStrcmp (prop_string, (xmlChar *) "static") == 0) {
+      exists = TRUE;
+      *property_value = GST_MPD_FILE_TYPE_STATIC;
+      GST_LOG (" - %s: static", property_name);
+    } else if (xmlStrcmp (prop_string, (xmlChar *) "Live") == 0
+        || xmlStrcmp (prop_string, (xmlChar *) "dynamic") == 0) {
+      exists = TRUE;
+      *property_value = GST_MPD_FILE_TYPE_DYNAMIC;
+      GST_LOG (" - %s: dynamic", property_name);
+    } else {
+      GST_WARNING ("failed to parse MPD type property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_mpd_helper_get_SAP_type (xmlNode * a_node,
+    const gchar * property_name, GstMPDSAPType * property_value)
+{
+  xmlChar *prop_string;
+  guint prop_SAP_type = 0;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (sscanf ((gchar *) prop_string, "%u", &prop_SAP_type) == 1
+        && prop_SAP_type <= 6) {
+      exists = TRUE;
+      *property_value = (GstMPDSAPType) prop_SAP_type;
+      GST_LOG (" - %s: %u", property_name, prop_SAP_type);
+    } else {
+      GST_WARNING
+          ("failed to parse unsigned integer property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+const gchar *
+gst_mpd_helper_get_audio_codec_from_mime (GstCaps * caps)
+{
+  GstStructure *s;
+  const gchar *name = "";
+  const gchar *codec_name = NULL;
+
+  if (!caps)
+    return NULL;
+  s = gst_caps_get_structure (caps, 0);
+  if (!s)
+    goto done;
+  name = gst_structure_get_name (s);
+  if (!g_strcmp0 (name, "audio/mpeg")) {
+    gint mpeg_version;
+    if (gst_structure_get_int (s, "mpegversion", &mpeg_version)) {
+      if (mpeg_version == 4)
+        return "mp4a";
+    }
+
+  } else {
+    GST_DEBUG ("No codecs for this caps name %s", name);
+  }
+
+done:
+  return codec_name;
+}
+
+const gchar *
+gst_mpd_helper_get_video_codec_from_mime (GstCaps * caps)
+{
+  GstStructure *s;
+  const gchar *name = "";
+  const gchar *codec_name = NULL;
+
+  if (!caps)
+    return NULL;
+
+  s = gst_caps_get_structure (caps, 0);
+  if (!s)
+    goto done;
+  name = gst_structure_get_name (s);
+  if (!g_strcmp0 (name, "video/x-h264")) {
+    return "avc1";
+  } else if (!g_strcmp0 (name, "video/x-h265")) {
+    return "hvc1";
+  } else {
+    GST_DEBUG ("No codecs for this caps name %s", name);
+  }
+
+done:
+  return codec_name;
+}
+
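+/* Maps an MPD mimeType attribute to the corresponding GStreamer caps
+ * description; mime types that are not special-cased below are returned
+ * unchanged. */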
+const gchar *
+gst_mpd_helper_mimetype_to_caps (const gchar * mimeType)
+{
+  if (mimeType == NULL)
+    return NULL;
+  if (strcmp (mimeType, "video/mp2t") == 0) {
+    return "video/mpegts, systemstream=(bool) true";
+  } else if (strcmp (mimeType, "video/mp4") == 0) {
+    return "video/quicktime";
+  } else if (strcmp (mimeType, "audio/mp4") == 0) {
+    return "audio/x-m4a";
+  } else if (strcmp (mimeType, "text/vtt") == 0) {
+    return "application/x-subtitle-vtt";
+  } else
+    return mimeType;
+}
+
+/* Some top-level mime types directly tell us which
+ * codec is inside */
+GstCaps *
+gst_mpd_helper_mimetype_to_codec_caps (const gchar * mimeType)
+{
+  if (mimeType == NULL)
+    return NULL;
+
+  if (strcmp (mimeType, "text/vtt") == 0)
+    return gst_caps_new_empty_simple ("application/x-subtitle-vtt");
+
+  return NULL;
+}
+
+/*
+ * Combine a base url with the current stream base url from the list of
+ * baseURLs. Takes ownership of base and returns a new base.
+ */
+GstUri *
+gst_mpd_helper_combine_urls (GstUri * base, GList * list, gchar ** query,
+    guint idx)
+{
+  GstMPDBaseURLNode *baseURL;
+  GstUri *ret = base;
+
+  if (list != NULL) {
+    baseURL = g_list_nth_data (list, idx);
+    if (!baseURL) {
+      baseURL = list->data;
+    }
+
+    ret = gst_uri_from_string_with_base (base, baseURL->baseURL);
+    gst_uri_unref (base);
+
+    if (ret && query) {
+      g_free (*query);
+      *query = gst_uri_get_query_string (ret);
+      if (*query) {
+        ret = gst_uri_make_writable (ret);
+        gst_uri_set_query_table (ret, NULL);
+      }
+    }
+  }
+
+  return ret;
+}
+
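+/*
+ * Hypothetical usage sketch (the list and query variables are illustrative,
+ * not taken from the surrounding code): ownership of the incoming base URI
+ * transfers to the helper, so callers keep only the returned URI.
+ *
+ *   GstUri *base = gst_uri_from_string ("http://example.com/a/manifest.mpd");
+ *   gchar *query = NULL;
+ *   base = gst_mpd_helper_combine_urls (base, baseURL_list, &query, 0);
+ *   ...
+ *   gst_uri_unref (base);
+ *   g_free (query);
+ */
+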
+/* comparison functions */
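+/* Note: only the first strlen (s2) bytes are compared, so a longer s1 that
+ * merely starts with s2 compares as equal; if exactly one argument is NULL
+ * the result is non-zero. */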
+int
+gst_mpd_helper_strncmp_ext (const char *s1, const char *s2)
+{
+  if (s1 == NULL && s2 == NULL)
+    return 0;
+  if (s1 == NULL && s2 != NULL)
+    return 1;
+  if (s2 == NULL && s1 != NULL)
+    return 1;
+  return strncmp (s1, s2, strlen (s2));
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdhelper.h
new file mode 100644 (file)
index 0000000..91cb7d3
--- /dev/null
@@ -0,0 +1,71 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GST_MPDHELPER_H__
+#define __GST_MPDHELPER_H__
+
+#include "gstxmlhelper.h"
+#include "gstmpdnode.h"
+#include "gstmpdurltypenode.h"
+#include "gstmpddescriptortypenode.h"
+#include "gstmpdsegmenttimelinenode.h"
+#include "gstmpdsegmentbasenode.h"
+
+
+G_BEGIN_DECLS
+
+typedef enum
+{
+  GST_SAP_TYPE_0 = 0,
+  GST_SAP_TYPE_1,
+  GST_SAP_TYPE_2,
+  GST_SAP_TYPE_3,
+  GST_SAP_TYPE_4,
+  GST_SAP_TYPE_5,
+  GST_SAP_TYPE_6
+} GstMPDSAPType;
+
+typedef enum
+{
+  GST_MPD_FILE_TYPE_STATIC = 0,
+  GST_MPD_FILE_TYPE_DYNAMIC
+} GstMPDFileType;
+
+#define GST_MPD_XLINK_ACTUATE_ON_LOAD_STR "onLoad"
+
+typedef enum
+{
+  GST_MPD_XLINK_ACTUATE_ON_REQUEST,
+  GST_MPD_XLINK_ACTUATE_ON_LOAD
+} GstMPDXLinkActuate;
+
+
+gboolean gst_mpd_helper_get_mpd_type (xmlNode * a_node, const gchar * property_name, GstMPDFileType * property_value);
+gboolean gst_mpd_helper_get_SAP_type (xmlNode * a_node, const gchar * property_name, GstMPDSAPType * property_value);
+
+const gchar * gst_mpd_helper_mimetype_to_caps (const gchar * mimeType);
+GstCaps *gst_mpd_helper_mimetype_to_codec_caps (const gchar * mimeType);
+const gchar * gst_mpd_helper_get_video_codec_from_mime (GstCaps * caps);
+const gchar * gst_mpd_helper_get_audio_codec_from_mime (GstCaps * caps);
+GstUri *gst_mpd_helper_combine_urls (GstUri * base, GList * list, gchar ** query, guint idx);
+int gst_mpd_helper_strncmp_ext (const char *s1, const char *s2);
+
+G_END_DECLS
+#endif /* __GST_MPDHELPER_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.c
new file mode 100644 (file)
index 0000000..257c75c
--- /dev/null
@@ -0,0 +1,84 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdlocationnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDLocationNode2, gst_mpd_location_node, GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_location_node_finalize (GObject * object)
+{
+  GstMPDLocationNode *self = GST_MPD_LOCATION_NODE (object);
+
+  g_free (self->location);
+
+  G_OBJECT_CLASS (gst_mpd_location_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_location_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr location_xml_node = NULL;
+  GstMPDLocationNode *self = GST_MPD_LOCATION_NODE (node);
+
+  location_xml_node = xmlNewNode (NULL, (xmlChar *) "Location");
+
+  if (self->location)
+    gst_xml_helper_set_content (location_xml_node, self->location);
+
+  return location_xml_node;
+}
+
+static void
+gst_mpd_location_node_class_init (GstMPDLocationNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  object_class->finalize = gst_mpd_location_node_finalize;
+
+  m_klass = GST_MPD_NODE_CLASS (klass);
+  m_klass->get_xml_node = gst_mpd_location_get_xml_node;
+}
+
+static void
+gst_mpd_location_node_init (GstMPDLocationNode * self)
+{
+  self->location = NULL;
+}
+
+GstMPDLocationNode *
+gst_mpd_location_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_LOCATION_NODE, NULL);
+}
+
+void
+gst_mpd_location_node_free (GstMPDLocationNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdlocationnode.h
new file mode 100644 (file)
index 0000000..1395e81
--- /dev/null
@@ -0,0 +1,46 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDLOCATIONNODE_H__
+#define __GSTMPDLOCATIONNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_LOCATION_NODE gst_mpd_location_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDLocationNode2, gst_mpd_location_node, GST, MPD_LOCATION_NODE, GstMPDNode)
+
+typedef GstMPDLocationNode2 GstMPDLocationNode;
+typedef GstMPDLocationNode2Class GstMPDLocationNodeClass;
+
+struct _GstMPDLocationNode2
+{
+  GstObject     parent_instance;
+  gchar *location;
+};
+
+GstMPDLocationNode * gst_mpd_location_node_new (void);
+void gst_mpd_location_node_free (GstMPDLocationNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDLOCATIONNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.c
new file mode 100644 (file)
index 0000000..1c736fe
--- /dev/null
@@ -0,0 +1,94 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdmetricsnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDMetricsNode2, gst_mpd_metrics_node, GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_metrics_node_finalize (GObject * object)
+{
+  GstMPDMetricsNode *self = GST_MPD_METRICS_NODE (object);
+
+  g_free (self->metrics);
+  g_list_free_full (self->MetricsRanges,
+      (GDestroyNotify) gst_mpd_metrics_range_node_free);
+
+  G_OBJECT_CLASS (gst_mpd_metrics_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_metrics_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr metrics_xml_node = NULL;
+  GstMPDMetricsNode *self = GST_MPD_METRICS_NODE (node);
+
+  metrics_xml_node = xmlNewNode (NULL, (xmlChar *) "Metrics");
+
+  if (self->metrics)
+    gst_xml_helper_set_prop_string (metrics_xml_node, "metrics", self->metrics);
+
+  g_list_foreach (self->Reportings, gst_mpd_node_get_list_item,
+      metrics_xml_node);
+  g_list_foreach (self->MetricsRanges, gst_mpd_node_get_list_item,
+      metrics_xml_node);
+
+  return metrics_xml_node;
+}
+
+static void
+gst_mpd_metrics_node_class_init (GstMPDMetricsNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_metrics_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_metrics_get_xml_node;
+}
+
+static void
+gst_mpd_metrics_node_init (GstMPDMetricsNode * self)
+{
+  self->metrics = NULL;
+  self->MetricsRanges = NULL;
+  self->Reportings = NULL;
+}
+
+GstMPDMetricsNode *
+gst_mpd_metrics_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_METRICS_NODE, NULL);
+}
+
+void
+gst_mpd_metrics_node_free (GstMPDMetricsNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsnode.h
new file mode 100644 (file)
index 0000000..60dbec2
--- /dev/null
@@ -0,0 +1,50 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDMETRICSNODE_H__
+#define __GSTMPDMETRICSNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_METRICS_NODE gst_mpd_metrics_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDMetricsNode2, gst_mpd_metrics_node, GST, MPD_METRICS_NODE, GstMPDNode)
+
+typedef GstMPDMetricsNode2 GstMPDMetricsNode;
+typedef GstMPDMetricsNode2Class GstMPDMetricsNodeClass;
+
+struct _GstMPDMetricsNode2
+{
+  GstObject parent_instance;
+  gchar *metrics;
+  /* list of Metrics Range nodes */
+  GList *MetricsRanges;
+  /* list of Reporting nodes */
+  GList *Reportings;
+};
+
+GstMPDMetricsNode * gst_mpd_metrics_node_new (void);
+void gst_mpd_metrics_node_free (GstMPDMetricsNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDMETRICSNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.c
new file mode 100644 (file)
index 0000000..5bf80d4
--- /dev/null
@@ -0,0 +1,75 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdmetricsrangenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDMetricsRangeNode2, gst_mpd_metrics_range_node,
+    GST_TYPE_MPD_NODE);
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_metrics_range_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr metrics_range_xml_node = NULL;
+  GstMPDMetricsRangeNode *self = GST_MPD_METRICS_RANGE_NODE (node);
+
+  metrics_range_xml_node = xmlNewNode (NULL, (xmlChar *) "Range");
+
+  if (self->starttime)
+    gst_xml_helper_set_prop_duration (metrics_range_xml_node, "starttime",
+        self->starttime);
+  if (self->duration)
+    gst_xml_helper_set_prop_duration (metrics_range_xml_node, "duration",
+        self->duration);
+
+  return metrics_range_xml_node;
+}
+
+static void
+gst_mpd_metrics_range_node_class_init (GstMPDMetricsRangeNodeClass * klass)
+{
+  GstMPDNodeClass *m_klass;
+
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  m_klass->get_xml_node = gst_mpd_metrics_range_get_xml_node;
+}
+
+static void
+gst_mpd_metrics_range_node_init (GstMPDMetricsRangeNode * self)
+{
+  self->starttime = 0;          /* [ms] */
+  self->duration = 0;           /* [ms] */
+}
+
+GstMPDMetricsRangeNode *
+gst_mpd_metrics_range_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_METRICS_RANGE_NODE, NULL);
+}
+
+void
+gst_mpd_metrics_range_node_free (GstMPDMetricsRangeNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmetricsrangenode.h
new file mode 100644 (file)
index 0000000..27f6050
--- /dev/null
@@ -0,0 +1,47 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDMETRICSRANGENODE_H__
+#define __GSTMPDMETRICSRANGENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_METRICS_RANGE_NODE gst_mpd_metrics_range_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDMetricsRangeNode2, gst_mpd_metrics_range_node, GST, MPD_METRICS_RANGE_NODE, GstMPDNode)
+
+typedef GstMPDMetricsRangeNode2 GstMPDMetricsRangeNode;
+typedef GstMPDMetricsRangeNode2Class GstMPDMetricsRangeNodeClass;
+
+struct _GstMPDMetricsRangeNode2
+{
+  GstObject parent_instance;
+  guint64 starttime;                 /* [ms] */
+  guint64 duration;                  /* [ms] */
+};
+
+GstMPDMetricsRangeNode * gst_mpd_metrics_range_node_new (void);
+void gst_mpd_metrics_range_node_free (GstMPDMetricsRangeNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDMETRICSRANGENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.c
new file mode 100644 (file)
index 0000000..470af5e
--- /dev/null
@@ -0,0 +1,153 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdmultsegmentbasenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDMultSegmentBaseNode2, gst_mpd_mult_segment_base_node,
+    GST_TYPE_MPD_NODE);
+
+enum
+{
+  PROP_MPD_MULT_SEGMENT_BASE_0 = 100,
+  PROP_MPD_MULT_SEGMENT_BASE_DURATION,
+  PROP_MPD_MULT_SEGMENT_BASE_START_NUMBER,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_mult_segment_base_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDMultSegmentBaseNode *self = GST_MPD_MULT_SEGMENT_BASE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_MULT_SEGMENT_BASE_DURATION:
+      self->duration = g_value_get_uint (value);
+      break;
+    case PROP_MPD_MULT_SEGMENT_BASE_START_NUMBER:
+      self->startNumber = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_mult_segment_base_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDMultSegmentBaseNode *self = GST_MPD_MULT_SEGMENT_BASE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_MULT_SEGMENT_BASE_DURATION:
+      g_value_set_uint (value, self->duration);
+      break;
+    case PROP_MPD_MULT_SEGMENT_BASE_START_NUMBER:
+      g_value_set_uint (value, self->startNumber);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_mult_segment_base_node_finalize (GObject * object)
+{
+  GstMPDMultSegmentBaseNode *self = GST_MPD_MULT_SEGMENT_BASE_NODE (object);
+
+  gst_mpd_segment_base_node_free (self->SegmentBase);
+  gst_mpd_segment_timeline_node_free (self->SegmentTimeline);
+  gst_mpd_url_type_node_free (self->BitstreamSwitching);
+
+  G_OBJECT_CLASS (gst_mpd_mult_segment_base_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static void
+gst_mpd_mult_segment_base_get_xml_node (GstMPDNode * node,
+    xmlNodePtr mult_segment_base_node)
+{
+  GstMPDMultSegmentBaseNode *self = GST_MPD_MULT_SEGMENT_BASE_NODE (node);
+
+  if (self->duration)
+    gst_xml_helper_set_prop_uint (mult_segment_base_node, "duration",
+        self->duration);
+  if (self->startNumber)
+    gst_xml_helper_set_prop_uint (mult_segment_base_node, "startNumber",
+        self->startNumber);
+  if (self->SegmentBase)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->SegmentBase),
+        mult_segment_base_node);
+  if (self->SegmentTimeline)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->SegmentTimeline),
+        mult_segment_base_node);
+  if (self->BitstreamSwitching)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->BitstreamSwitching),
+        mult_segment_base_node);
+}
+
+static void
+gst_mpd_mult_segment_base_node_class_init (GstMPDMultSegmentBaseNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+
+  object_class = G_OBJECT_CLASS (klass);
+
+  object_class->finalize = gst_mpd_mult_segment_base_node_finalize;
+  object_class->set_property = gst_mpd_mult_segment_base_node_set_property;
+  object_class->get_property = gst_mpd_mult_segment_base_node_get_property;
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_MULT_SEGMENT_BASE_DURATION, g_param_spec_uint ("duration",
+          "duration", "duration of segment", 0, G_MAXINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_MULT_SEGMENT_BASE_START_NUMBER,
+      g_param_spec_uint ("start-number", "start number",
+          "start number in the segment list", 0, G_MAXINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_mult_segment_base_node_init (GstMPDMultSegmentBaseNode * self)
+{
+  self->duration = 0;
+  self->startNumber = 0;
+  self->SegmentBase = NULL;
+  self->SegmentTimeline = NULL;
+  self->BitstreamSwitching = NULL;
+}
+
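+/* Serializes @node through its own get_xml_node vmethod, adds the
+ * MultipleSegmentBaseType attributes and child nodes on top, and attaches
+ * the result to @parent_xml_node. */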
+void
+gst_mpd_mult_segment_base_node_add_child_node (GstMPDNode * node,
+    xmlNodePtr parent_xml_node)
+{
+  if (node) {
+    xmlNodePtr new_xml_node = gst_mpd_node_get_xml_pointer (node);
+    gst_mpd_mult_segment_base_get_xml_node (node, new_xml_node);
+    xmlAddChild (parent_xml_node, new_xml_node);
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.h
new file mode 100644 (file)
index 0000000..181ee7c
--- /dev/null
@@ -0,0 +1,51 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDMULTSEGMENTBASENODE_H__
+#define __GSTMPDMULTSEGMENTBASENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_MULT_SEGMENT_BASE_NODE gst_mpd_mult_segment_base_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDMultSegmentBaseNode2, gst_mpd_mult_segment_base_node, GST, MPD_MULT_SEGMENT_BASE_NODE, GstMPDNode)
+
+typedef GstMPDMultSegmentBaseNode2 GstMPDMultSegmentBaseNode;
+typedef GstMPDMultSegmentBaseNode2Class GstMPDMultSegmentBaseNodeClass;
+
+struct _GstMPDMultSegmentBaseNode2
+{
+  GstObject     base;
+  guint duration;                  /* in seconds */
+  guint startNumber;
+  /* SegmentBaseType extension */
+  GstMPDSegmentBaseNode *SegmentBase;
+  /* SegmentTimeline node */
+  GstMPDSegmentTimelineNode *SegmentTimeline;
+  /* BitstreamSwitching node */
+  GstMPDURLTypeNode *BitstreamSwitching;
+};
+
+
+void gst_mpd_mult_segment_base_node_add_child_node (GstMPDNode* node, xmlNodePtr parent_xml_node);
+
+G_END_DECLS
+#endif /* __GSTMPDMULTSEGMENTBASENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.c
new file mode 100644 (file)
index 0000000..93e20f0
--- /dev/null
@@ -0,0 +1,78 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdnode.h"
+
+G_DEFINE_TYPE (GstMPDNode2, gst_mpd_node, GST_TYPE_OBJECT);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_node_class_init (GstMPDNode2Class * klass)
+{
+}
+
+static void
+gst_mpd_node_init (GstMPDNode2 * self)
+{
+}
+
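+/* GFunc for g_list_foreach (): serializes the GstMPDNode passed as @data
+ * and attaches it as a child of the xmlNodePtr passed as @user_data. */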
+void
+gst_mpd_node_get_list_item (gpointer data, gpointer user_data)
+{
+  GstMPDNode *node = (GstMPDNode *) data;
+  xmlNodePtr parent_xml_node = (xmlNodePtr) user_data;
+  xmlNodePtr new_xml_node = gst_mpd_node_get_xml_pointer (node);
+
+  xmlAddChild (parent_xml_node, new_xml_node);
+}
+
+void
+gst_mpd_node_add_child_node (GstMPDNode * child, xmlNodePtr parent)
+{
+  xmlNodePtr new_xml_node = gst_mpd_node_get_xml_pointer (child);
+  xmlAddChild (parent, new_xml_node);
+}
+
+gboolean
+gst_mpd_node_get_xml_buffer (GstMPDNode * node, gchar ** xml_content,
+    int *xml_size)
+{
+  GstMPDNode2Class *klass;
+
+  klass = GST_MPD_NODE_GET_CLASS (node);
+  if (klass->get_xml_buffer)
+    return klass->get_xml_buffer (node, xml_content, xml_size);
+  else
+    return FALSE;
+}
+
+xmlNodePtr
+gst_mpd_node_get_xml_pointer (GstMPDNode * node)
+{
+  GstMPDNode2Class *klass;
+  if (!node)
+    return NULL;
+  klass = GST_MPD_NODE_GET_CLASS (node);
+  if (klass->get_xml_node)
+    return klass->get_xml_node (node);
+  else
+    return NULL;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdnode.h
new file mode 100644 (file)
index 0000000..e878762
--- /dev/null
@@ -0,0 +1,53 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDNODE_H__
+#define __GSTMPDNODE_H__
+
+#include <gst/gst.h>
+#include "gstxmlhelper.h"
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_NODE gst_mpd_node_get_type ()
+G_DECLARE_DERIVABLE_TYPE (GstMPDNode2, gst_mpd_node, GST, MPD_NODE, GstObject)
+
+typedef GstMPDNode2 GstMPDNode;
+typedef GstMPDNode2Class GstMPDNodeClass;
+
+#define glib_autoptr_clear_GstMPDNode glib_autoptr_clear_GstMPDNode2
+
+typedef gboolean (*GstMPDGetXMLBuffer) (GstMPDNode * n, gchar ** doc_content, int *doc_size);
+typedef xmlNodePtr (*GstMPDGetXMLNode) (GstMPDNode * n);
+
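+/* Subclasses implement get_xml_node to serialize themselves into a libxml2
+ * node; get_xml_buffer, where implemented, serializes a node into a complete
+ * XML document buffer. */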
+struct _GstMPDNode2Class {
+    GstObjectClass base;
+
+    GstMPDGetXMLBuffer get_xml_buffer;
+    GstMPDGetXMLNode get_xml_node;
+};
+
+gboolean gst_mpd_node_get_xml_buffer (GstMPDNode * node, gchar ** xml_content, int * xml_size);
+xmlNodePtr gst_mpd_node_get_xml_pointer (GstMPDNode * node);
+
+void gst_mpd_node_get_list_item (gpointer data, gpointer user_data);
+void gst_mpd_node_add_child_node (GstMPDNode* child, xmlNodePtr parent);
+
+G_END_DECLS
+#endif /* __GSTMPDNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.c
new file mode 100644 (file)
index 0000000..8e7ed18
--- /dev/null
@@ -0,0 +1,1569 @@
+/*
+ * DASH MPD parsing library
+ *
+ * gstmpdparser.c
+ *
+ * Copyright (C) 2012 STMicroelectronics
+ *
+ * Authors:
+ *   Gianluca Gennari <gennarone@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <string.h>
+
+#include <gst/pbutils/pbutils.h>
+#include "gstmpdparser.h"
+#include "gstdash_debug.h"
+
+#define GST_CAT_DEFAULT gst_dash_demux2_debug
+
+
+/* XML node parsing */
+static void gst_mpdparser_parse_baseURL_node (GList ** list, xmlNode * a_node);
+static void gst_mpdparser_parse_descriptor_type (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_content_component_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_location_node (GList ** list, xmlNode * a_node);
+static void gst_mpdparser_parse_subrepresentation_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_segment_url_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_url_type_node (GstMPDURLTypeNode ** pointer,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_seg_base_type_ext (GstMPDSegmentBaseNode **
+    pointer, xmlNode * a_node, GstMPDSegmentBaseNode * parent);
+static void gst_mpdparser_parse_s_node (GQueue * queue, xmlNode * a_node);
+static void gst_mpdparser_parse_segment_timeline_node (GstMPDSegmentTimelineNode
+    ** pointer, xmlNode * a_node);
+static gboolean
+gst_mpdparser_parse_mult_seg_base_node (GstMPDMultSegmentBaseNode *
+    pointer, xmlNode * a_node, GstMPDMultSegmentBaseNode * parent);
+static gboolean gst_mpdparser_parse_segment_list_node (GstMPDSegmentListNode **
+    pointer, xmlNode * a_node, GstMPDSegmentListNode * parent);
+static void
+gst_mpdparser_parse_representation_base (GstMPDRepresentationBaseNode *
+    pointer, xmlNode * a_node);
+static gboolean gst_mpdparser_parse_representation_node (GList ** list,
+    xmlNode * a_node, GstMPDAdaptationSetNode * parent,
+    GstMPDPeriodNode * period_node);
+static gboolean gst_mpdparser_parse_adaptation_set_node (GList ** list,
+    xmlNode * a_node, GstMPDPeriodNode * parent);
+static void gst_mpdparser_parse_subset_node (GList ** list, xmlNode * a_node);
+static gboolean
+gst_mpdparser_parse_segment_template_node (GstMPDSegmentTemplateNode ** pointer,
+    xmlNode * a_node, GstMPDSegmentTemplateNode * parent);
+static gboolean gst_mpdparser_parse_period_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_program_info_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_metrics_range_node (GList ** list,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_metrics_node (GList ** list, xmlNode * a_node);
+static gboolean gst_mpdparser_parse_root_node (GstMPDRootNode ** pointer,
+    xmlNode * a_node);
+static void gst_mpdparser_parse_utctiming_node (GList ** list,
+    xmlNode * a_node);
+
+/*
+  Duration Data Type
+
+  The duration data type is used to specify a time interval.
+
+  The time interval is specified in the following form "-PnYnMnDTnHnMnS" where:
+
+    * - indicates the negative sign (optional)
+    * P indicates the period (required)
+    * nY indicates the number of years
+    * nM indicates the number of months
+    * nD indicates the number of days
+    * T indicates the start of a time section (required if you are going to specify hours, minutes, or seconds)
+    * nH indicates the number of hours
+    * nM indicates the number of minutes
+    * nS indicates the number of seconds
+*/
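+
+/*
+ * Illustrative examples of the above (values made up, not from any manifest):
+ *   "PT1H30M" -> 1 hour and 30 minutes
+ *   "P2DT6H"  -> 2 days and 6 hours
+ *   "PT3.5S"  -> 3.5 seconds
+ */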
+
+
+
+
+static void
+gst_mpdparser_parse_baseURL_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDBaseURLNode *new_base_url;
+
+  new_base_url = gst_mpd_baseurl_node_new ();
+  *list = g_list_append (*list, new_base_url);
+
+  GST_LOG ("content of BaseURL node:");
+  gst_xml_helper_get_node_content (a_node, &new_base_url->baseURL);
+
+  GST_LOG ("attributes of BaseURL node:");
+  gst_xml_helper_get_prop_string (a_node, "serviceLocation",
+      &new_base_url->serviceLocation);
+  gst_xml_helper_get_prop_string (a_node, "byteRange",
+      &new_base_url->byteRange);
+}
+
+static void
+gst_mpdparser_parse_descriptor_type (GList ** list, xmlNode * a_node)
+{
+  GstMPDDescriptorTypeNode *new_descriptor;
+
+  new_descriptor =
+      gst_mpd_descriptor_type_node_new ((const gchar *) a_node->name);
+  *list = g_list_append (*list, new_descriptor);
+
+  GST_LOG ("attributes of %s node:", a_node->name);
+  gst_xml_helper_get_prop_string_stripped (a_node, "schemeIdUri",
+      &new_descriptor->schemeIdUri);
+  if (!gst_xml_helper_get_prop_string (a_node, "value", &new_descriptor->value)) {
+    /* if no value attribute, use XML string representation of the node */
+    gst_xml_helper_get_node_as_string (a_node, &new_descriptor->value);
+  }
+}
+
+static void
+gst_mpdparser_parse_content_component_node (GList ** list, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDContentComponentNode *new_content_component;
+
+  new_content_component = gst_mpd_content_component_node_new ();
+  *list = g_list_append (*list, new_content_component);
+
+  GST_LOG ("attributes of ContentComponent node:");
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "id", 0,
+      &new_content_component->id);
+  gst_xml_helper_get_prop_string (a_node, "lang", &new_content_component->lang);
+  gst_xml_helper_get_prop_string (a_node, "contentType",
+      &new_content_component->contentType);
+  gst_xml_helper_get_prop_ratio (a_node, "par", &new_content_component->par);
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Accessibility") == 0) {
+        gst_mpdparser_parse_descriptor_type
+            (&new_content_component->Accessibility, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Role") == 0) {
+        gst_mpdparser_parse_descriptor_type (&new_content_component->Role,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Rating") == 0) {
+        gst_mpdparser_parse_descriptor_type
+            (&new_content_component->Rating, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Viewpoint") == 0) {
+        gst_mpdparser_parse_descriptor_type
+            (&new_content_component->Viewpoint, cur_node);
+      }
+    }
+  }
+}
+
+static void
+gst_mpdparser_parse_location_node (GList ** list, xmlNode * a_node)
+{
+  gchar *location = NULL;
+  GstMPDLocationNode *locationNode;
+
+  GST_LOG ("content of Location node:");
+  if (gst_xml_helper_get_node_content (a_node, &location)) {
+    locationNode = gst_mpd_location_node_new ();
+    locationNode->location = location;
+    *list = g_list_append (*list, locationNode);
+  }
+}
+
+static void
+gst_mpdparser_parse_subrepresentation_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDSubRepresentationNode *new_subrep;
+
+  new_subrep = gst_mpd_sub_representation_node_new ();
+  *list = g_list_append (*list, new_subrep);
+
+  GST_LOG ("attributes of SubRepresentation node:");
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "level", 0,
+      &new_subrep->level);
+  gst_xml_helper_get_prop_uint_vector_type (a_node, "dependencyLevel",
+      &new_subrep->dependencyLevel, &new_subrep->dependencyLevel_size);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "bandwidth", 0,
+      &new_subrep->bandwidth);
+  gst_xml_helper_get_prop_string_vector_type (a_node,
+      "contentComponent", &new_subrep->contentComponent);
+
+  /* RepresentationBase extension */
+  gst_mpdparser_parse_representation_base (GST_MPD_REPRESENTATION_BASE_NODE
+      (new_subrep), a_node);
+}
+
+
+
+static void
+gst_mpdparser_parse_segment_url_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDSegmentURLNode *new_segment_url;
+
+  new_segment_url = gst_mpd_segment_url_node_new ();
+  *list = g_list_append (*list, new_segment_url);
+
+  GST_LOG ("attributes of SegmentURL node:");
+  gst_xml_helper_get_prop_string (a_node, "media", &new_segment_url->media);
+  gst_xml_helper_get_prop_range (a_node, "mediaRange",
+      &new_segment_url->mediaRange);
+  gst_xml_helper_get_prop_string (a_node, "index", &new_segment_url->index);
+  gst_xml_helper_get_prop_range (a_node, "indexRange",
+      &new_segment_url->indexRange);
+}
+
+static void
+gst_mpdparser_parse_url_type_node (GstMPDURLTypeNode ** pointer,
+    xmlNode * a_node)
+{
+  GstMPDURLTypeNode *new_url_type;
+
+  gst_mpd_url_type_node_free (*pointer);
+  *pointer = new_url_type =
+      gst_mpd_url_type_node_new ((const gchar *) a_node->name);
+
+  GST_LOG ("attributes of URLType node:");
+  gst_xml_helper_get_prop_string (a_node, "sourceURL",
+      &new_url_type->sourceURL);
+  gst_xml_helper_get_prop_range (a_node, "range", &new_url_type->range);
+}
+
+static void
+gst_mpdparser_parse_seg_base_type_ext (GstMPDSegmentBaseNode ** pointer,
+    xmlNode * a_node, GstMPDSegmentBaseNode * parent)
+{
+  xmlNode *cur_node;
+  GstMPDSegmentBaseNode *seg_base_type;
+  guint intval;
+  guint64 int64val;
+  gboolean boolval;
+  GstXMLRange *rangeval;
+
+  gst_mpd_segment_base_node_free (*pointer);
+  *pointer = seg_base_type = gst_mpd_segment_base_node_new ();
+
+  /* Initialize values that have defaults */
+  seg_base_type->indexRangeExact = FALSE;
+  seg_base_type->timescale = 1;
+
+  /* Inherit attribute values from parent */
+  if (parent) {
+    seg_base_type->timescale = parent->timescale;
+    seg_base_type->presentationTimeOffset = parent->presentationTimeOffset;
+    seg_base_type->indexRange = gst_xml_helper_clone_range (parent->indexRange);
+    seg_base_type->indexRangeExact = parent->indexRangeExact;
+    seg_base_type->Initialization =
+        gst_mpd_url_type_node_clone (parent->Initialization);
+    seg_base_type->RepresentationIndex =
+        gst_mpd_url_type_node_clone (parent->RepresentationIndex);
+  }
+
+  /* We must retrieve each value first to see if it exists.  If it does not
+   * exist, we do not want to overwrite an inherited value */
+  GST_LOG ("attributes of SegmentBaseType extension:");
+  if (gst_xml_helper_get_prop_unsigned_integer (a_node, "timescale", 1,
+          &intval)) {
+    seg_base_type->timescale = intval;
+  }
+  if (gst_xml_helper_get_prop_unsigned_integer_64 (a_node,
+          "presentationTimeOffset", 0, &int64val)) {
+    seg_base_type->presentationTimeOffset = int64val;
+  }
+  if (gst_xml_helper_get_prop_range (a_node, "indexRange", &rangeval)) {
+    if (seg_base_type->indexRange) {
+      g_slice_free (GstXMLRange, seg_base_type->indexRange);
+    }
+    seg_base_type->indexRange = rangeval;
+  }
+  if (gst_xml_helper_get_prop_boolean (a_node, "indexRangeExact",
+          FALSE, &boolval)) {
+    seg_base_type->indexRangeExact = boolval;
+  }
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Initialization") == 0 ||
+          xmlStrcmp (cur_node->name, (xmlChar *) "Initialisation") == 0) {
+        /* parse will free the previous pointer to create a new one */
+        gst_mpdparser_parse_url_type_node (&seg_base_type->Initialization,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "RepresentationIndex") == 0) {
+        /* parse will free the previous pointer to create a new one */
+        gst_mpdparser_parse_url_type_node (&seg_base_type->RepresentationIndex,
+            cur_node);
+      }
+    }
+  }
+}
+
+
+
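+/* A SegmentTimeline <S> entry, in timescale units: t is the segment start
+ * time, d the segment duration and r the repeat count (r=0 means the entry
+ * describes a single segment). */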
+static void
+gst_mpdparser_parse_s_node (GQueue * queue, xmlNode * a_node)
+{
+  GstMPDSNode *new_s_node;
+
+  new_s_node = gst_mpd_s_node_new ();
+  g_queue_push_tail (queue, new_s_node);
+
+  GST_LOG ("attributes of S node:");
+  gst_xml_helper_get_prop_unsigned_integer_64 (a_node, "t", 0, &new_s_node->t);
+  gst_xml_helper_get_prop_unsigned_integer_64 (a_node, "d", 0, &new_s_node->d);
+  gst_xml_helper_get_prop_signed_integer (a_node, "r", 0, &new_s_node->r);
+}
+
+
+
+static void
+gst_mpdparser_parse_segment_timeline_node (GstMPDSegmentTimelineNode ** pointer,
+    xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDSegmentTimelineNode *new_seg_timeline;
+
+  gst_mpd_segment_timeline_node_free (*pointer);
+  *pointer = new_seg_timeline = gst_mpd_segment_timeline_node_new ();
+  if (new_seg_timeline == NULL) {
+    GST_WARNING ("Allocation of SegmentTimeline node failed!");
+    return;
+  }
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "S") == 0) {
+        gst_mpdparser_parse_s_node (&new_seg_timeline->S, cur_node);
+      }
+    }
+  }
+}
+
+static gboolean
+gst_mpdparser_parse_mult_seg_base_node (GstMPDMultSegmentBaseNode *
+    mult_seg_base_node, xmlNode * a_node, GstMPDMultSegmentBaseNode * parent)
+{
+  xmlNode *cur_node;
+
+  guint intval;
+  gboolean has_timeline = FALSE, has_duration = FALSE;
+
+  mult_seg_base_node->duration = 0;
+  mult_seg_base_node->startNumber = 1;
+
+  /* Inherit attribute values from parent */
+  if (parent) {
+    mult_seg_base_node->duration = parent->duration;
+    mult_seg_base_node->startNumber = parent->startNumber;
+    mult_seg_base_node->SegmentTimeline =
+        gst_mpd_segment_timeline_node_clone (parent->SegmentTimeline);
+    mult_seg_base_node->BitstreamSwitching =
+        gst_mpd_url_type_node_clone (parent->BitstreamSwitching);
+  }
+  GST_LOG ("attributes of MultipleSegmentBaseType extension:");
+  if (gst_xml_helper_get_prop_unsigned_integer (a_node, "duration", 0, &intval)) {
+    mult_seg_base_node->duration = intval;
+  }
+
+  /* duration might be specified from parent */
+  if (mult_seg_base_node->duration)
+    has_duration = TRUE;
+
+  if (gst_xml_helper_get_prop_unsigned_integer (a_node, "startNumber", 1,
+          &intval)) {
+    mult_seg_base_node->startNumber = intval;
+  }
+
+  GST_LOG ("extension of MultipleSegmentBaseType extension:");
+  gst_mpdparser_parse_seg_base_type_ext (&mult_seg_base_node->SegmentBase,
+      a_node, (parent ? parent->SegmentBase : NULL));
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentTimeline") == 0) {
+        /* parse frees the previous SegmentTimeline, if any */
+        gst_mpdparser_parse_segment_timeline_node
+            (&mult_seg_base_node->SegmentTimeline, cur_node);
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "BitstreamSwitching") == 0) {
+        /* parse frees the old url before setting the new one */
+        gst_mpdparser_parse_url_type_node
+            (&mult_seg_base_node->BitstreamSwitching, cur_node);
+      }
+    }
+  }
+
+  has_timeline = mult_seg_base_node->SegmentTimeline != NULL;
+
+  /* Check for a missing duration/timeline only when this node is a direct
+   * child of a Representation */
+  if (xmlStrcmp (a_node->parent->name, (xmlChar *) "Representation") == 0
+      && !has_duration && !has_timeline) {
+    GST_ERROR ("segment has neither duration nor timeline");
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_mpdparser_parse_segment_list_node (GstMPDSegmentListNode ** pointer,
+    xmlNode * a_node, GstMPDSegmentListNode * parent)
+{
+  xmlNode *cur_node;
+  GstMPDSegmentListNode *new_segment_list;
+  gchar *actuate;
+  gboolean segment_urls_inherited_from_parent = FALSE;
+
+  gst_mpd_segment_list_node_free (*pointer);
+  new_segment_list = gst_mpd_segment_list_node_new ();
+
+  /* Inherit attribute values from parent */
+  if (parent) {
+    GList *list;
+    GstMPDSegmentURLNode *seg_url;
+    for (list = g_list_first (parent->SegmentURL); list;
+        list = g_list_next (list)) {
+      seg_url = (GstMPDSegmentURLNode *) list->data;
+      new_segment_list->SegmentURL =
+          g_list_append (new_segment_list->SegmentURL,
+          gst_mpd_segment_url_node_clone (seg_url));
+      segment_urls_inherited_from_parent = TRUE;
+    }
+  }
+
+  new_segment_list->actuate = GST_MPD_XLINK_ACTUATE_ON_REQUEST;
+  if (gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "href", &new_segment_list->xlink_href)
+      && gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "actuate", &actuate)) {
+    if (strcmp (actuate, GST_MPD_XLINK_ACTUATE_ON_LOAD_STR) == 0)
+      new_segment_list->actuate = GST_MPD_XLINK_ACTUATE_ON_LOAD;
+    xmlFree (actuate);
+  }
+
+  GST_LOG ("extension of SegmentList node:");
+  if (!gst_mpdparser_parse_mult_seg_base_node
+      (GST_MPD_MULT_SEGMENT_BASE_NODE (new_segment_list), a_node,
+          (parent ? GST_MPD_MULT_SEGMENT_BASE_NODE (parent) : NULL)))
+    goto error;
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentURL") == 0) {
+        if (segment_urls_inherited_from_parent) {
+          /*
+           * SegmentBase, SegmentTemplate and SegmentList shall inherit
+           * attributes and elements from the same element on a higher level.
+           * If the same attribute or element is present on both levels,
+           * the one on the lower level shall take precedence over the one
+           * on the higher level.
+           */
+
+          /* Clear the list of inherited segment URLs */
+          g_list_free_full (new_segment_list->SegmentURL,
+              (GDestroyNotify) gst_mpd_segment_url_node_free);
+          new_segment_list->SegmentURL = NULL;
+
+          /* mark the fact that we cleared the list, so that we do not clear it again */
+          segment_urls_inherited_from_parent = FALSE;
+        }
+        gst_mpdparser_parse_segment_url_node (&new_segment_list->SegmentURL,
+            cur_node);
+      }
+    }
+  }
+
+  *pointer = new_segment_list;
+  return TRUE;
+
+error:
+  gst_mpd_segment_list_node_free (new_segment_list);
+  return FALSE;
+}
+
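+/* ContentProtection with value "MSPR 2.0" (Microsoft PlayReady) carries its
+ * data in a <pro> child element instead of node attributes; the content of
+ * that child is stored as the descriptor value. Every other scheme is parsed
+ * as a regular descriptor. */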
+static void
+gst_mpdparser_parse_content_protection_node (GList ** list, xmlNode * a_node)
+{
+  gchar *value = NULL;
+  if (gst_xml_helper_get_prop_string (a_node, "value", &value)) {
+    if (!g_strcmp0 (value, "MSPR 2.0")) {
+      xmlNode *cur_node;
+      for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+        if (cur_node->type == XML_ELEMENT_NODE) {
+          if (xmlStrcmp (cur_node->name, (xmlChar *) "pro") == 0) {
+            GstMPDDescriptorTypeNode *new_descriptor;
+            new_descriptor = gst_mpd_descriptor_type_node_new ((const gchar *)
+                cur_node->name);
+            *list = g_list_append (*list, new_descriptor);
+
+            gst_xml_helper_get_prop_string_stripped (a_node, "schemeIdUri",
+                &new_descriptor->schemeIdUri);
+
+            gst_xml_helper_get_node_content (cur_node, &new_descriptor->value);
+            goto beach;
+          }
+        }
+      }
+    } else {
+      gst_mpdparser_parse_descriptor_type (list, a_node);
+    }
+  } else {
+    gst_mpdparser_parse_descriptor_type (list, a_node);
+  }
+beach:
+  if (value)
+    g_free (value);
+}
+
+static void
+gst_mpdparser_parse_representation_base (GstMPDRepresentationBaseNode *
+    representation_base, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+
+  GST_LOG ("attributes of RepresentationBaseType extension:");
+  gst_xml_helper_get_prop_string (a_node, "profiles",
+      &representation_base->profiles);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "width", 0,
+      &representation_base->width);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "height", 0,
+      &representation_base->height);
+  gst_xml_helper_get_prop_ratio (a_node, "sar", &representation_base->sar);
+  gst_xml_helper_get_prop_framerate (a_node, "frameRate",
+      &representation_base->frameRate);
+  gst_xml_helper_get_prop_framerate (a_node, "minFrameRate",
+      &representation_base->minFrameRate);
+  gst_xml_helper_get_prop_framerate (a_node, "maxFrameRate",
+      &representation_base->maxFrameRate);
+  gst_xml_helper_get_prop_string (a_node, "audioSamplingRate",
+      &representation_base->audioSamplingRate);
+  gst_xml_helper_get_prop_string (a_node, "mimeType",
+      &representation_base->mimeType);
+  gst_xml_helper_get_prop_string (a_node, "segmentProfiles",
+      &representation_base->segmentProfiles);
+  gst_xml_helper_get_prop_string (a_node, "codecs",
+      &representation_base->codecs);
+  if (representation_base->codecs) {
+    GST_DEBUG ("Getting caps ");
+    representation_base->caps =
+        gst_codec_utils_caps_from_mime_codec (representation_base->codecs);
+  } else {
+    representation_base->caps =
+        gst_mpd_helper_mimetype_to_codec_caps (representation_base->mimeType);
+    GST_DEBUG ("Getting caps from mime type gave %" GST_PTR_FORMAT,
+        representation_base->caps);
+  }
+  gst_xml_helper_get_prop_double (a_node, "maximumSAPPeriod",
+      &representation_base->maximumSAPPeriod);
+  gst_mpd_helper_get_SAP_type (a_node, "startWithSAP",
+      &representation_base->startWithSAP);
+  gst_xml_helper_get_prop_double (a_node, "maxPlayoutRate",
+      &representation_base->maxPlayoutRate);
+  gst_xml_helper_get_prop_boolean (a_node, "codingDependency",
+      FALSE, &representation_base->codingDependency);
+  gst_xml_helper_get_prop_string (a_node, "scanType",
+      &representation_base->scanType);
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "FramePacking") == 0) {
+        gst_mpdparser_parse_descriptor_type
+            (&representation_base->FramePacking, cur_node);
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "AudioChannelConfiguration") == 0) {
+        gst_mpdparser_parse_descriptor_type
+            (&representation_base->AudioChannelConfiguration, cur_node);
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "ContentProtection") == 0) {
+        gst_mpdparser_parse_content_protection_node
+            (&representation_base->ContentProtection, cur_node);
+      }
+    }
+  }
+}
+
+static gboolean
+gst_mpdparser_parse_representation_node (GList ** list, xmlNode * a_node,
+    GstMPDAdaptationSetNode * parent, GstMPDPeriodNode * period_node)
+{
+  xmlNode *cur_node;
+  GstMPDRepresentationNode *new_representation;
+
+  new_representation = gst_mpd_representation_node_new ();
+
+  GST_LOG ("attributes of Representation node:");
+  if (!gst_xml_helper_get_prop_string (a_node, "id", &new_representation->id)) {
+    GST_ERROR ("Cannot parse Representation id, invalid manifest");
+    goto error;
+  }
+  if (!gst_xml_helper_get_prop_unsigned_integer (a_node, "bandwidth", 0,
+          &new_representation->bandwidth)) {
+    GST_ERROR ("Cannot parse Representation bandwidth, invalid manifest");
+    goto error;
+  }
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "qualityRanking", 0,
+      &new_representation->qualityRanking);
+  gst_xml_helper_get_prop_string_vector_type (a_node, "dependencyId",
+      &new_representation->dependencyId);
+  gst_xml_helper_get_prop_string_vector_type (a_node,
+      "mediaStreamStructureId", &new_representation->mediaStreamStructureId);
+  /* RepresentationBase extension */
+  gst_mpdparser_parse_representation_base
+      (GST_MPD_REPRESENTATION_BASE_NODE (new_representation), a_node);
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentBase") == 0) {
+        gst_mpdparser_parse_seg_base_type_ext (&new_representation->SegmentBase,
+            cur_node, parent->SegmentBase ?
+            parent->SegmentBase : period_node->SegmentBase);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentTemplate") == 0) {
+        if (!gst_mpdparser_parse_segment_template_node
+            (&new_representation->SegmentTemplate, cur_node,
+                parent->SegmentTemplate ?
+                parent->SegmentTemplate : period_node->SegmentTemplate))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentList") == 0) {
+        if (!gst_mpdparser_parse_segment_list_node
+            (&new_representation->SegmentList, cur_node,
+                parent->SegmentList ? parent->SegmentList : period_node->
+                SegmentList))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "BaseURL") == 0) {
+        gst_mpdparser_parse_baseURL_node (&new_representation->BaseURLs,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "SubRepresentation") == 0) {
+        gst_mpdparser_parse_subrepresentation_node
+            (&new_representation->SubRepresentations, cur_node);
+      }
+    }
+  }
+
+  /* some sanity checking */
+
+  *list = g_list_append (*list, new_representation);
+  return TRUE;
+
+error:
+  gst_mpd_representation_node_free (new_representation);
+  return FALSE;
+}
+
+static gboolean
+gst_mpdparser_parse_adaptation_set_node (GList ** list, xmlNode * a_node,
+    GstMPDPeriodNode * parent)
+{
+  xmlNode *cur_node;
+  GstMPDAdaptationSetNode *new_adap_set;
+  gchar *actuate;
+
+  new_adap_set = gst_mpd_adaptation_set_node_new ();
+
+  GST_LOG ("attributes of AdaptationSet node:");
+
+  new_adap_set->actuate = GST_MPD_XLINK_ACTUATE_ON_REQUEST;
+  if (gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "href", &new_adap_set->xlink_href)
+      && gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "actuate", &actuate)) {
+    if (strcmp (actuate, "onLoad") == 0)
+      new_adap_set->actuate = GST_MPD_XLINK_ACTUATE_ON_LOAD;
+    xmlFree (actuate);
+  }
+
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "id", 0, &new_adap_set->id);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "group", 0,
+      &new_adap_set->group);
+  gst_xml_helper_get_prop_string (a_node, "lang", &new_adap_set->lang);
+  gst_xml_helper_get_prop_string (a_node, "contentType",
+      &new_adap_set->contentType);
+  gst_xml_helper_get_prop_ratio (a_node, "par", &new_adap_set->par);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "minBandwidth", 0,
+      &new_adap_set->minBandwidth);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "maxBandwidth", 0,
+      &new_adap_set->maxBandwidth);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "minWidth", 0,
+      &new_adap_set->minWidth);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "maxWidth", 0,
+      &new_adap_set->maxWidth);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "minHeight", 0,
+      &new_adap_set->minHeight);
+  gst_xml_helper_get_prop_unsigned_integer (a_node, "maxHeight", 0,
+      &new_adap_set->maxHeight);
+  gst_xml_helper_get_prop_cond_uint (a_node, "segmentAlignment",
+      &new_adap_set->segmentAlignment);
+  gst_xml_helper_get_prop_boolean (a_node, "bitstreamSwitching",
+      parent->bitstreamSwitching, &new_adap_set->bitstreamSwitching);
+  if (parent->bitstreamSwitching && !new_adap_set->bitstreamSwitching) {
+    /* according to the standard, if the Period's bitstreamSwitching attribute
+     * is true, the AdaptationSet should not have the bitstreamSwitching
+     * attribute set to false.
+     * We should return a parsing error, but we are generous and ignore the
+     * standard violation.
+     */
+    new_adap_set->bitstreamSwitching = parent->bitstreamSwitching;
+  }
+  gst_xml_helper_get_prop_cond_uint (a_node, "subsegmentAlignment",
+      &new_adap_set->subsegmentAlignment);
+  gst_mpd_helper_get_SAP_type (a_node, "subsegmentStartsWithSAP",
+      &new_adap_set->subsegmentStartsWithSAP);
+
+  /* RepresentationBase extension */
+  gst_mpdparser_parse_representation_base
+      (GST_MPD_REPRESENTATION_BASE_NODE (new_adap_set), a_node);
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Accessibility") == 0) {
+        gst_mpdparser_parse_descriptor_type (&new_adap_set->Accessibility,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Role") == 0) {
+        gst_mpdparser_parse_descriptor_type (&new_adap_set->Role, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Rating") == 0) {
+        gst_mpdparser_parse_descriptor_type (&new_adap_set->Rating, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Viewpoint") == 0) {
+        gst_mpdparser_parse_descriptor_type (&new_adap_set->Viewpoint,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "BaseURL") == 0) {
+        gst_mpdparser_parse_baseURL_node (&new_adap_set->BaseURLs, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentBase") == 0) {
+        gst_mpdparser_parse_seg_base_type_ext (&new_adap_set->SegmentBase,
+            cur_node, parent->SegmentBase);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentList") == 0) {
+        if (!gst_mpdparser_parse_segment_list_node (&new_adap_set->SegmentList,
+                cur_node, parent->SegmentList))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "ContentComponent") == 0) {
+        gst_mpdparser_parse_content_component_node
+            (&new_adap_set->ContentComponents, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentTemplate") == 0) {
+        if (!gst_mpdparser_parse_segment_template_node
+            (&new_adap_set->SegmentTemplate, cur_node, parent->SegmentTemplate))
+          goto error;
+      }
+    }
+  }
+
+  /* We must parse Representation after everything else in the AdaptationSet
+   * has been parsed because certain Representation child elements can inherit
+   * attributes specified by the same element in the AdaptationSet
+   */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Representation") == 0) {
+        if (!gst_mpdparser_parse_representation_node
+            (&new_adap_set->Representations, cur_node, new_adap_set, parent))
+          goto error;
+      }
+    }
+  }
+
+  *list = g_list_append (*list, new_adap_set);
+  return TRUE;
+
+error:
+  gst_mpd_adaptation_set_node_free (new_adap_set);
+  return FALSE;
+}
+
+static void
+gst_mpdparser_parse_subset_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDSubsetNode *new_subset;
+
+  new_subset = gst_mpd_subset_node_new ();
+  *list = g_list_append (*list, new_subset);
+
+  GST_LOG ("attributes of Subset node:");
+  gst_xml_helper_get_prop_uint_vector_type (a_node, "contains",
+      &new_subset->contains, &new_subset->contains_size);
+}
+
+static gboolean
+gst_mpdparser_parse_segment_template_node (GstMPDSegmentTemplateNode ** pointer,
+    xmlNode * a_node, GstMPDSegmentTemplateNode * parent)
+{
+  GstMPDSegmentTemplateNode *new_segment_template;
+  gchar *strval;
+
+  gst_mpd_segment_template_node_free (*pointer);
+  new_segment_template = gst_mpd_segment_template_node_new ();
+
+  GST_LOG ("extension of SegmentTemplate node:");
+  if (!gst_mpdparser_parse_mult_seg_base_node
+      (GST_MPD_MULT_SEGMENT_BASE_NODE (new_segment_template), a_node,
+          (parent ? GST_MPD_MULT_SEGMENT_BASE_NODE (parent) : NULL)))
+    goto error;
+
+  /* Inherit attribute values from parent when the value isn't found */
+  GST_LOG ("attributes of SegmentTemplate node:");
+  if (gst_xml_helper_get_prop_string (a_node, "media", &strval)) {
+    new_segment_template->media = strval;
+  } else if (parent) {
+    new_segment_template->media = xmlMemStrdup (parent->media);
+  }
+
+  if (gst_xml_helper_get_prop_string (a_node, "index", &strval)) {
+    new_segment_template->index = strval;
+  } else if (parent) {
+    new_segment_template->index = xmlMemStrdup (parent->index);
+  }
+
+  if (gst_xml_helper_get_prop_string (a_node, "initialization", &strval)) {
+    new_segment_template->initialization = strval;
+  } else if (parent) {
+    new_segment_template->initialization =
+        xmlMemStrdup (parent->initialization);
+  }
+
+  if (gst_xml_helper_get_prop_string (a_node, "bitstreamSwitching", &strval)) {
+    new_segment_template->bitstreamSwitching = strval;
+  } else if (parent) {
+    new_segment_template->bitstreamSwitching =
+        xmlMemStrdup (parent->bitstreamSwitching);
+  }
+
+  *pointer = new_segment_template;
+  return TRUE;
+
+error:
+  gst_mpd_segment_template_node_free (new_segment_template);
+  return FALSE;
+}
+
+static gboolean
+gst_mpdparser_parse_period_node (GList ** list, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDPeriodNode *new_period;
+  gchar *actuate;
+
+  new_period = gst_mpd_period_node_new ();
+
+  GST_LOG ("attributes of Period node:");
+
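+  /* An xlink:href attribute marks this Period as a remote element that still
+   * needs to be fetched; xlink:actuate selects whether that happens on load
+   * or on request (the default). */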
+  new_period->actuate = GST_MPD_XLINK_ACTUATE_ON_REQUEST;
+  if (gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "href", &new_period->xlink_href)
+      && gst_xml_helper_get_ns_prop_string (a_node,
+          "http://www.w3.org/1999/xlink", "actuate", &actuate)) {
+    if (strcmp (actuate, "onLoad") == 0)
+      new_period->actuate = GST_MPD_XLINK_ACTUATE_ON_LOAD;
+    xmlFree (actuate);
+  }
+
+  gst_xml_helper_get_prop_string (a_node, "id", &new_period->id);
+  gst_xml_helper_get_prop_duration (a_node, "start", GST_MPD_DURATION_NONE,
+      &new_period->start);
+  gst_xml_helper_get_prop_duration (a_node, "duration",
+      GST_MPD_DURATION_NONE, &new_period->duration);
+  gst_xml_helper_get_prop_boolean (a_node, "bitstreamSwitching", FALSE,
+      &new_period->bitstreamSwitching);
+
+  /* explore children nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentBase") == 0) {
+        gst_mpdparser_parse_seg_base_type_ext (&new_period->SegmentBase,
+            cur_node, NULL);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentList") == 0) {
+        if (!gst_mpdparser_parse_segment_list_node (&new_period->SegmentList,
+                cur_node, NULL))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "SegmentTemplate") == 0) {
+        if (!gst_mpdparser_parse_segment_template_node
+            (&new_period->SegmentTemplate, cur_node, NULL))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Subset") == 0) {
+        gst_mpdparser_parse_subset_node (&new_period->Subsets, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "BaseURL") == 0) {
+        gst_mpdparser_parse_baseURL_node (&new_period->BaseURLs, cur_node);
+      }
+    }
+  }
+
+  /* We must parse AdaptationSet after everything else in the Period has been
+   * parsed because certain AdaptationSet child elements can inherit attributes
+   * specified by the same element in the Period
+   */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "AdaptationSet") == 0) {
+        if (!gst_mpdparser_parse_adaptation_set_node
+            (&new_period->AdaptationSets, cur_node, new_period))
+          goto error;
+      }
+    }
+  }
+
+  *list = g_list_append (*list, new_period);
+  return TRUE;
+
+error:
+  gst_mpd_period_node_free (new_period);
+  return FALSE;
+}
+
+static void
+gst_mpdparser_parse_program_info_node (GList ** list, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDProgramInformationNode *new_prog_info;
+
+  new_prog_info = gst_mpd_program_information_node_new ();
+  *list = g_list_append (*list, new_prog_info);
+
+  GST_LOG ("attributes of ProgramInformation node:");
+  gst_xml_helper_get_prop_string (a_node, "lang", &new_prog_info->lang);
+  gst_xml_helper_get_prop_string (a_node, "moreInformationURL",
+      &new_prog_info->moreInformationURL);
+
+  /* explore children nodes */
+  GST_LOG ("children of ProgramInformation node:");
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Title") == 0) {
+        gst_xml_helper_get_node_content (cur_node, &new_prog_info->Title);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Source") == 0) {
+        gst_xml_helper_get_node_content (cur_node, &new_prog_info->Source);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Copyright") == 0) {
+        gst_xml_helper_get_node_content (cur_node, &new_prog_info->Copyright);
+      }
+    }
+  }
+}
+
+static void
+gst_mpdparser_parse_metrics_range_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDMetricsRangeNode *new_metrics_range;
+
+  new_metrics_range = gst_mpd_metrics_range_node_new ();
+  *list = g_list_append (*list, new_metrics_range);
+
+  GST_LOG ("attributes of Metrics Range node:");
+  gst_xml_helper_get_prop_duration (a_node, "starttime",
+      GST_MPD_DURATION_NONE, &new_metrics_range->starttime);
+  gst_xml_helper_get_prop_duration (a_node, "duration",
+      GST_MPD_DURATION_NONE, &new_metrics_range->duration);
+}
+
+static void
+gst_mpdparser_parse_metrics_node (GList ** list, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDMetricsNode *new_metrics;
+
+  new_metrics = gst_mpd_metrics_node_new ();
+  *list = g_list_append (*list, new_metrics);
+
+  GST_LOG ("attributes of Metrics node:");
+  gst_xml_helper_get_prop_string (a_node, "metrics", &new_metrics->metrics);
+
+  /* explore children nodes */
+  GST_LOG ("children of Metrics node:");
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Range") == 0) {
+        gst_mpdparser_parse_metrics_range_node (&new_metrics->MetricsRanges,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Reporting") == 0) {
+        /* No reporting scheme is specified in this part of ISO/IEC 23009.
+         * It is expected that external specifications may define formats
+         * and delivery for the reporting data. */
+        GST_LOG (" - Reporting node found (unknown structure)");
+      }
+    }
+  }
+}
+
+/* The UTCTiming element is defined in
+ * ISO/IEC 23009-1:2014/PDAM 1 "Information technology — Dynamic adaptive streaming over HTTP (DASH) — Part 1: Media presentation description and segment formats / Amendment 1: High Profile and Availability Time Synchronization"
+ */
+static void
+gst_mpdparser_parse_utctiming_node (GList ** list, xmlNode * a_node)
+{
+  GstMPDUTCTimingNode *new_timing;
+  gchar *method = NULL;
+  gchar *value = NULL;
+
+  new_timing = gst_mpd_utctiming_node_new ();
+
+  GST_LOG ("attributes of UTCTiming node:");
+  if (gst_xml_helper_get_prop_string (a_node, "schemeIdUri", &method)) {
+    new_timing->method = gst_mpd_utctiming_get_method (method);
+    xmlFree (method);
+  }
+
+  if (gst_xml_helper_get_prop_string (a_node, "value", &value)) {
+    int max_tokens = 0;
+    if (GST_MPD_UTCTIMING_TYPE_DIRECT == new_timing->method) {
+      /* The GST_MPD_UTCTIMING_TYPE_DIRECT method is a special case
+       * that is not a space separated list.
+       */
+      max_tokens = 1;
+    }
+    new_timing->urls = g_strsplit (value, " ", max_tokens);
+    xmlFree (value);
+  }
+
+  /* append to list only if both method and urls were set */
+  if (new_timing->method != 0 && new_timing->urls != NULL &&
+      g_strv_length (new_timing->urls) != 0) {
+    *list = g_list_append (*list, new_timing);
+  } else {
+    gst_mpd_utctiming_node_free (new_timing);
+  }
+}
+
+static gboolean
+gst_mpdparser_parse_root_node (GstMPDRootNode ** pointer, xmlNode * a_node)
+{
+  xmlNode *cur_node;
+  GstMPDRootNode *new_mpd_root;
+
+  gst_mpd_root_node_free (*pointer);
+  *pointer = NULL;
+  new_mpd_root = gst_mpd_root_node_new ();
+
+  GST_LOG ("namespaces of root MPD node:");
+  new_mpd_root->default_namespace =
+      gst_xml_helper_get_node_namespace (a_node, NULL);
+  new_mpd_root->namespace_xsi =
+      gst_xml_helper_get_node_namespace (a_node, "xsi");
+  new_mpd_root->namespace_ext =
+      gst_xml_helper_get_node_namespace (a_node, "ext");
+
+  GST_LOG ("attributes of root MPD node:");
+  gst_xml_helper_get_prop_string (a_node, "schemaLocation",
+      &new_mpd_root->schemaLocation);
+  gst_xml_helper_get_prop_string (a_node, "id", &new_mpd_root->id);
+  gst_xml_helper_get_prop_string (a_node, "profiles", &new_mpd_root->profiles);
+  gst_mpd_helper_get_mpd_type (a_node, "type", &new_mpd_root->type);
+  gst_xml_helper_get_prop_dateTime (a_node, "availabilityStartTime",
+      &new_mpd_root->availabilityStartTime);
+  gst_xml_helper_get_prop_dateTime (a_node, "availabilityEndTime",
+      &new_mpd_root->availabilityEndTime);
+  gst_xml_helper_get_prop_duration (a_node, "mediaPresentationDuration",
+      GST_MPD_DURATION_NONE, &new_mpd_root->mediaPresentationDuration);
+  gst_xml_helper_get_prop_duration (a_node, "minimumUpdatePeriod",
+      GST_MPD_DURATION_NONE, &new_mpd_root->minimumUpdatePeriod);
+  gst_xml_helper_get_prop_duration (a_node, "minBufferTime",
+      GST_MPD_DURATION_NONE, &new_mpd_root->minBufferTime);
+  gst_xml_helper_get_prop_duration (a_node, "timeShiftBufferDepth",
+      GST_MPD_DURATION_NONE, &new_mpd_root->timeShiftBufferDepth);
+  gst_xml_helper_get_prop_duration (a_node, "suggestedPresentationDelay",
+      GST_MPD_DURATION_NONE, &new_mpd_root->suggestedPresentationDelay);
+  gst_xml_helper_get_prop_duration (a_node, "maxSegmentDuration",
+      GST_MPD_DURATION_NONE, &new_mpd_root->maxSegmentDuration);
+  gst_xml_helper_get_prop_duration (a_node, "maxSubsegmentDuration",
+      GST_MPD_DURATION_NONE, &new_mpd_root->maxSubsegmentDuration);
+
+  /* explore children Period nodes */
+  for (cur_node = a_node->children; cur_node; cur_node = cur_node->next) {
+    if (cur_node->type == XML_ELEMENT_NODE) {
+      if (xmlStrcmp (cur_node->name, (xmlChar *) "Period") == 0) {
+        if (!gst_mpdparser_parse_period_node (&new_mpd_root->Periods, cur_node))
+          goto error;
+      } else if (xmlStrcmp (cur_node->name,
+              (xmlChar *) "ProgramInformation") == 0) {
+        gst_mpdparser_parse_program_info_node (&new_mpd_root->ProgramInfos,
+            cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "BaseURL") == 0) {
+        gst_mpdparser_parse_baseURL_node (&new_mpd_root->BaseURLs, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Location") == 0) {
+        gst_mpdparser_parse_location_node (&new_mpd_root->Locations, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "Metrics") == 0) {
+        gst_mpdparser_parse_metrics_node (&new_mpd_root->Metrics, cur_node);
+      } else if (xmlStrcmp (cur_node->name, (xmlChar *) "UTCTiming") == 0) {
+        gst_mpdparser_parse_utctiming_node (&new_mpd_root->UTCTimings,
+            cur_node);
+      }
+    }
+  }
+
+  *pointer = new_mpd_root;
+  return TRUE;
+
+error:
+  gst_mpd_root_node_free (new_mpd_root);
+  return FALSE;
+}
+
+/* internal memory management functions */
+
+/* ISO/IEC 23009-1:2014 5.3.9.4.4 */
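+/* For example "%05d" (from "$Number%05d$") is accepted, while "%5d" (no
+ * leading zero) or "%05d%s" (a second '%') are rejected. */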
+static gboolean
+validate_format (const gchar * format)
+{
+  const gchar *p = format;
+
+  /* Check if it starts with % */
+  if (!p || p[0] != '%')
+    return FALSE;
+  p++;
+
+  /* the spec mandates a format like %0[width]d */
+  /* Following the %, we must have a 0 */
+  if (p[0] != '0')
+    return FALSE;
+
+  /* Following the % must be a number starting with 0
+   */
+  while (g_ascii_isdigit (*p))
+    p++;
+
+  /* After the zero-padded width digits, there must be a d.
+   */
+  if (p[0] != 'd')
+    return FALSE;
+  p++;
+
+  /* And then potentially more characters without any
+   * further %, even if the spec does not mention this
+   */
+  p = strchr (p, '%');
+  if (p)
+    return FALSE;
+
+  return TRUE;
+}
+
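+/* Rewrite a validated format so it can print a guint64, e.g. "%05d" becomes
+ * "%05" G_GINT64_MODIFIER "d"; $Time$ values may not fit in a plain int. */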
+static gchar *
+promote_format_to_uint64 (const gchar * format)
+{
+  const gchar *p = format;
+  gchar *promoted_format;
+
+  /* Must be called with a validated format! */
+  g_return_val_if_fail (validate_format (format), NULL);
+
+  /* it starts with % */
+  p++;
+
+  /* Following the % is the zero-padded width; validate_format() has already
+   * guaranteed the leading 0 and the trailing d
+   */
+  if (p[0] == '0') {
+    p++;
+
+    while (g_ascii_isdigit (*p))
+      p++;
+  }
+
+  /* After any 0 and alphanumeric values, there must be a d.
+   * Otherwise validation would have failed
+   */
+  g_assert (p[0] == 'd');
+
+  promoted_format =
+      g_strdup_printf ("%.*s" G_GINT64_MODIFIER "%s", (gint) (p - format),
+      format, p);
+
+  return promoted_format;
+}
+
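+/* Only characters allowed in an RFC 1738 URL are accepted, and '%' must be
+ * followed by two hex digits: e.g. "video%20one" passes, "video one" fails. */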
+static gboolean
+gst_mpdparser_validate_rfc1738_url (const char *s)
+{
+  while (*s) {
+    if (!strchr
+        (";:@&=aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ0123456789$-_.+!*'(),%/",
+            *s))
+      return FALSE;
+    if (*s == '%') {
+      /* g_ascii_isxdigit returns FALSE for NUL, and || is a short circuiting
+         operator, so this is safe for strings ending before two hex digits */
+      if (!g_ascii_isxdigit (s[1]) || !g_ascii_isxdigit (s[2]))
+        return FALSE;
+      s += 2;
+    }
+    s++;
+  }
+  return TRUE;
+}
+
+void
+gst_mpdparser_media_fragment_info_clear (GstMediaFragmentInfo * fragment)
+{
+  g_free (fragment->uri);
+  g_free (fragment->index_uri);
+}
+
+/* API */
+gboolean
+gst_mpdparser_get_mpd_root_node (GstMPDRootNode ** mpd_root_node,
+    const gchar * data, gint size)
+{
+  gboolean ret = FALSE;
+
+  if (data) {
+    xmlDocPtr doc;
+    xmlNode *root_element = NULL;
+
+    GST_DEBUG ("MPD file fully buffered, start parsing...");
+
+    /* parse the complete MPD file into a tree (using the libxml2 default parser API) */
+
+    /* this initializes the library and checks for potential ABI mismatches
+     * between the version it was compiled for and the actual shared
+     * library used
+     */
+    LIBXML_TEST_VERSION;
+
+    /* parse "data" into a document (which is a libxml2 tree structure xmlDoc) */
+    doc = xmlReadMemory (data, size, "noname.xml", NULL, XML_PARSE_NONET);
+    if (doc == NULL) {
+      GST_ERROR ("failed to parse the MPD file");
+      ret = FALSE;
+    } else {
+      /* get the root element node */
+      root_element = xmlDocGetRootElement (doc);
+
+      if (root_element->type != XML_ELEMENT_NODE
+          || xmlStrcmp (root_element->name, (xmlChar *) "MPD") != 0) {
+        GST_ERROR
+            ("can not find the root element MPD, failed to parse the MPD file");
+        ret = FALSE;            /* used to return TRUE before, but this seems wrong */
+      } else {
+        /* now we can parse the MPD root node and all children nodes, recursively */
+        ret = gst_mpdparser_parse_root_node (mpd_root_node, root_element);
+      }
+      /* free the document */
+      xmlFreeDoc (doc);
+    }
+  }
+
+  return ret;
+}
+
+GstMPDSegmentListNode *
+gst_mpdparser_get_external_segment_list (const gchar * data, gint size,
+    GstMPDSegmentListNode * parent)
+{
+  xmlDocPtr doc = NULL;
+  GstMPDSegmentListNode *new_segment_list = NULL;
+
+  doc = xmlReadMemory (data, size, "noname.xml", NULL, XML_PARSE_NONET);
+
+
+  /* NOTE: ISO/IEC 23009-1:2014 5.3.9.3.2 says that one or multiple SegmentList
+   * elements are allowed in external XML; however, multiple SegmentLists do not make
+   * sense because Period/AdaptationSet/Representation allow only one SegmentList */
+  if (doc) {
+    xmlNode *root_element = xmlDocGetRootElement (doc);
+
+
+    if (root_element->type == XML_ELEMENT_NODE &&
+        xmlStrcmp (root_element->name, (xmlChar *) "SegmentList") == 0) {
+      gst_mpdparser_parse_segment_list_node (&new_segment_list, root_element,
+          parent);
+    }
+  }
+
+  if (doc)
+    xmlFreeDoc (doc);
+
+  return new_segment_list;
+}
+
+GList *
+gst_mpdparser_get_external_periods (const gchar * data, gint size)
+{
+  xmlDocPtr doc = NULL;
+  GList *new_periods = NULL;
+
+  doc = xmlReadMemory (data, size, "noname.xml", NULL, XML_PARSE_NONET);
+
+
+  if (doc) {
+    xmlNode *root_element = xmlDocGetRootElement (doc);
+    xmlNode *iter;
+
+    for (iter = root_element->children; iter; iter = iter->next) {
+      if (iter->type == XML_ELEMENT_NODE) {
+        if (xmlStrcmp (iter->name, (xmlChar *) "Period") == 0) {
+          gst_mpdparser_parse_period_node (&new_periods, iter);
+        } else {
+          goto error;
+        }
+      }
+    }
+  }
+
+done:
+  if (doc)
+    xmlFreeDoc (doc);
+
+  return new_periods;
+
+error:
+  GST_ERROR ("Failed to parse period node XML");
+
+  if (new_periods) {
+    g_list_free_full (new_periods, (GDestroyNotify) gst_mpd_period_node_free);
+    new_periods = NULL;
+  }
+  goto done;
+}
+
+GList *
+gst_mpdparser_get_external_adaptation_sets (const gchar * data, gint size,
+    GstMPDPeriodNode * period)
+{
+  xmlDocPtr doc = NULL;
+  GList *new_adaptation_sets = NULL;
+
+  doc = xmlReadMemory (data, size, "noname.xml", NULL, XML_PARSE_NONET);
+
+  /* NOTE: ISO/IEC 23009-1:2014 5.3.3.2 says that exactly one AdaptationSet
+   * is allowed in external XML */
+  if (doc) {
+    xmlNode *root_element = xmlDocGetRootElement (doc);
+    if (root_element->type == XML_ELEMENT_NODE &&
+        xmlStrcmp (root_element->name, (xmlChar *) "AdaptationSet") == 0) {
+      gst_mpdparser_parse_adaptation_set_node (&new_adaptation_sets,
+          root_element, period);
+    }
+  }
+
+  if (doc)
+    xmlFreeDoc (doc);
+
+  return new_adaptation_sets;
+}
+
+void
+gst_mpdparser_free_stream_period (GstStreamPeriod * stream_period)
+{
+  if (stream_period) {
+    g_slice_free (GstStreamPeriod, stream_period);
+  }
+}
+
+void
+gst_mpdparser_free_media_segment (GstMediaSegment * media_segment)
+{
+  if (media_segment) {
+    g_slice_free (GstMediaSegment, media_segment);
+  }
+}
+
+void
+gst_mpdparser_init_active_stream_segments (GstActiveStream * stream)
+{
+  g_assert (stream->segments == NULL);
+  stream->segments = g_ptr_array_new ();
+  g_ptr_array_set_free_func (stream->segments,
+      (GDestroyNotify) gst_mpdparser_free_media_segment);
+}
+
+void
+gst_mpdparser_free_active_stream (GstActiveStream * active_stream)
+{
+  if (active_stream) {
+    g_free (active_stream->baseURL);
+    active_stream->baseURL = NULL;
+    g_free (active_stream->queryURL);
+    active_stream->queryURL = NULL;
+    if (active_stream->segments)
+      g_ptr_array_unref (active_stream->segments);
+    g_slice_free (GstActiveStream, active_stream);
+  }
+}
+
+const gchar *
+gst_mpdparser_get_initializationURL (GstActiveStream * stream,
+    GstMPDURLTypeNode * InitializationURL)
+{
+  const gchar *url_prefix;
+
+  g_return_val_if_fail (stream != NULL, NULL);
+
+  url_prefix = (InitializationURL
+      && InitializationURL->sourceURL) ? InitializationURL->sourceURL : stream->
+      baseURL;
+
+  return url_prefix;
+}
+
+gchar *
+gst_mpdparser_get_mediaURL (GstActiveStream * stream,
+    GstMPDSegmentURLNode * segmentURL)
+{
+  const gchar *url_prefix;
+
+  g_return_val_if_fail (stream != NULL, NULL);
+  g_return_val_if_fail (segmentURL != NULL, NULL);
+
+  url_prefix = segmentURL->media ? segmentURL->media : stream->baseURL;
+  g_return_val_if_fail (url_prefix != NULL, NULL);
+
+  return segmentURL->media;
+}
+
+/* navigation functions */
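+/* Map the mimeType to a stream type; the Representation's own mimeType takes
+ * precedence, otherwise the AdaptationSet's is used (e.g. "video/mp4" maps to
+ * GST_STREAM_VIDEO). */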
+GstStreamMimeType
+gst_mpdparser_representation_get_mimetype (GstMPDAdaptationSetNode * adapt_set,
+    GstMPDRepresentationNode * rep)
+{
+  gchar *mime = NULL;
+  if (rep)
+    mime = GST_MPD_REPRESENTATION_BASE_NODE (rep)->mimeType;
+  if (mime == NULL) {
+    mime = GST_MPD_REPRESENTATION_BASE_NODE (adapt_set)->mimeType;
+  }
+
+  if (gst_mpd_helper_strncmp_ext (mime, "audio") == 0)
+    return GST_STREAM_AUDIO;
+  if (gst_mpd_helper_strncmp_ext (mime, "video") == 0)
+    return GST_STREAM_VIDEO;
+  if (gst_mpd_helper_strncmp_ext (mime, "application") == 0
+      || gst_mpd_helper_strncmp_ext (mime, "text") == 0)
+    return GST_STREAM_APPLICATION;
+
+  return GST_STREAM_UNKNOWN;
+}
+
+/* Helper methods */
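+/* Expand the $...$ identifiers of an MPD URL template. With illustrative
+ * values id="v1" and number=7, the template "$RepresentationID$/seg_$Number%05d$.m4s"
+ * expands to "v1/seg_00007.m4s"; "$$" produces a literal '$'. */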
+gchar *
+gst_mpdparser_build_URL_from_template (const gchar * url_template,
+    const gchar * id, guint number, guint bandwidth, guint64 time)
+{
+  static const gchar default_format[] = "%01d";
+  gchar **tokens, *token, *ret;
+  const gchar *format;
+  gint i, num_tokens;
+
+  g_return_val_if_fail (url_template != NULL, NULL);
+  tokens = g_strsplit_set (url_template, "$", -1);
+  if (!tokens) {
+    GST_WARNING ("Scan of URL template failed!");
+    return NULL;
+  }
+  num_tokens = g_strv_length (tokens);
+
+  /*
+   * Each identifier is guarded by 2 $, which means that we must have an odd number of tokens.
+   * An even number of tokens means the string is not valid.
+   */
+  if ((num_tokens & 1) == 0) {
+    GST_ERROR ("Invalid number of tokens (%d). url_template is '%s'",
+        num_tokens, url_template);
+    g_strfreev (tokens);
+    return NULL;
+  }
+
+  for (i = 0; i < num_tokens; i++) {
+    token = tokens[i];
+    format = default_format;
+
+    /* the tokens to replace must be provided between $ characters, e.g. $token$.
+     * For a string like token0$token1$token2$token3$token4, only the odd-numbered
+     * tokens (1,3,...) must be parsed.
+     *
+     * Skip even tokens
+     */
+    if ((i & 1) == 0)
+      continue;
+
+    if (!g_strcmp0 (token, "RepresentationID")) {
+      if (!gst_mpdparser_validate_rfc1738_url (id))
+        goto invalid_representation_id;
+
+      tokens[i] = g_strdup_printf ("%s", id);
+      g_free (token);
+    } else if (!strncmp (token, "Number", 6)) {
+      if (strlen (token) > 6) {
+        format = token + 6;     /* format tag */
+      }
+      if (!validate_format (format))
+        goto invalid_format;
+
+      tokens[i] = g_strdup_printf (format, number);
+      g_free (token);
+    } else if (!strncmp (token, "Bandwidth", 9)) {
+      if (strlen (token) > 9) {
+        format = token + 9;     /* format tag */
+      }
+      if (!validate_format (format))
+        goto invalid_format;
+
+      tokens[i] = g_strdup_printf (format, bandwidth);
+      g_free (token);
+    } else if (!strncmp (token, "Time", 4)) {
+      gchar *promoted_format;
+
+      if (strlen (token) > 4) {
+        format = token + 4;     /* format tag */
+      }
+      if (!validate_format (format))
+        goto invalid_format;
+
+      promoted_format = promote_format_to_uint64 (format);
+      tokens[i] = g_strdup_printf (promoted_format, time);
+      g_free (promoted_format);
+      g_free (token);
+    } else if (!g_strcmp0 (token, "")) {
+      tokens[i] = g_strdup_printf ("%s", "$");
+      g_free (token);
+    } else {
+      /* unexpected identifier found between $ signs
+       *
+       * "If the URL contains unescaped $ symbols which do not enclose a valid
+       * identifier then the result of URL formation is undefined"
+       */
+      goto invalid_format;
+    }
+  }
+
+  ret = g_strjoinv (NULL, tokens);
+
+  g_strfreev (tokens);
+
+  return ret;
+
+invalid_format:
+  {
+    GST_ERROR ("Invalid format '%s' in '%s'", format, token);
+
+    g_strfreev (tokens);
+
+    return NULL;
+  }
+invalid_representation_id:
+  {
+    GST_ERROR
+        ("Representation ID string '%s' has characters invalid in an RFC 1738 URL",
+        id);
+
+    g_strfreev (tokens);
+
+    return NULL;
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdparser.h
new file mode 100644 (file)
index 0000000..69b2935
--- /dev/null
@@ -0,0 +1,173 @@
+/*
+ * DASH MPD parsing library
+ *
+ * gstmpdparser.h
+ *
+ * Copyright (C) 2012 STMicroelectronics
+ *
+ * Authors:
+ *   Gianluca Gennari <gennarone@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MPDPARSER_H__
+#define __GST_MPDPARSER_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include "gstadaptivedemux.h"
+
+#include "gstmpdhelper.h"
+#include "gstxmlhelper.h"
+#include "gstmpdrootnode.h"
+#include "gstmpdbaseurlnode.h"
+#include "gstmpdutctimingnode.h"
+#include "gstmpdmetricsnode.h"
+#include "gstmpdmetricsrangenode.h"
+#include "gstmpdsnode.h"
+#include "gstmpdsegmenttimelinenode.h"
+#include "gstmpdsegmenttemplatenode.h"
+#include "gstmpdsegmenturlnode.h"
+#include "gstmpdsegmentlistnode.h"
+#include "gstmpdsegmentbasenode.h"
+#include "gstmpdperiodnode.h"
+#include "gstmpdrepresentationnode.h"
+#include "gstmpdsubrepresentationnode.h"
+#include "gstmpdcontentcomponentnode.h"
+#include "gstmpdadaptationsetnode.h"
+#include "gstmpdsubsetnode.h"
+#include "gstmpdprograminformationnode.h"
+#include "gstmpdlocationnode.h"
+#include "gstmpdreportingnode.h"
+#include "gstmpdurltypenode.h"
+#include "gstmpddescriptortypenode.h"
+#include "gstmpdrepresentationbasenode.h"
+#include "gstmpdmultsegmentbasenode.h"
+
+G_BEGIN_DECLS
+
+typedef struct _GstActiveStream           GstActiveStream;
+typedef struct _GstStreamPeriod           GstStreamPeriod;
+typedef struct _GstMediaFragmentInfo      GstMediaFragmentInfo;
+typedef struct _GstMediaSegment           GstMediaSegment;
+
+
+#define GST_MPD_DURATION_NONE ((guint64)-1)
+
+typedef enum
+{
+  GST_STREAM_UNKNOWN,
+  GST_STREAM_VIDEO,           /* video stream (the main one) */
+  GST_STREAM_AUDIO,           /* audio stream (optional) */
+  GST_STREAM_APPLICATION      /* application stream (optional): for timed text/subtitles */
+} GstStreamMimeType;
+
+/**
+ * GstStreamPeriod:
+ *
+ * Stream period data structure
+ */
+struct _GstStreamPeriod
+{
+  GstMPDPeriodNode *period;                      /* Stream period */
+  guint number;                               /* Period number */
+  GstClockTime start;                         /* Period start time */
+  GstClockTime duration;                      /* Period duration */
+};
+
+/**
+ * GstMediaSegment:
+ *
+ * Media segment data structure
+ */
+struct _GstMediaSegment
+{
+  GstMPDSegmentURLNode *SegmentURL;              /* this is NULL when using a SegmentTemplate */
+  guint number;                               /* segment number */
+  gint repeat;                                /* number of extra repetitions (0 = played only once) */
+  guint64 scale_start;                        /* start time in timescale units */
+  guint64 scale_duration;                     /* duration in timescale units */
+  GstClockTime start;                         /* segment start time */
+  GstClockTime duration;                      /* segment duration */
+};
+
+struct _GstMediaFragmentInfo
+{
+  gchar *uri;
+  gint64 range_start;
+  gint64 range_end;
+
+  gchar *index_uri;
+  gint64 index_range_start;
+  gint64 index_range_end;
+
+  gboolean discontinuity;
+  GstClockTime timestamp;
+  GstClockTime duration;
+};
+
+/**
+ * GstActiveStream:
+ *
+ * Active stream data structure
+ */
+struct _GstActiveStream
+{
+  GstStreamMimeType mimeType;                 /* video/audio/application */
+
+  guint baseURL_idx;                          /* index of the baseURL used for last request */
+  gchar *baseURL;                             /* active baseURL used for last request */
+  gchar *queryURL;                            /* active queryURL used for last request */
+  guint max_bandwidth;                        /* max bandwidth allowed for this mimeType */
+
+  GstMPDAdaptationSetNode *cur_adapt_set;        /* active adaptation set */
+  gint representation_idx;                    /* index of current representation */
+  GstMPDRepresentationNode *cur_representation;  /* active representation */
+  GstMPDSegmentBaseNode *cur_segment_base;       /* active segment base */
+  GstMPDSegmentListNode *cur_segment_list;       /* active segment list */
+  GstMPDSegmentTemplateNode *cur_seg_template;   /* active segment template */
+  gint segment_index;                         /* index of next sequence chunk */
+  guint segment_repeat_index;                 /* index of the repeat count of a segment */
+  GPtrArray *segments;                        /* array of GstMediaSegment */
+  GstClockTime presentationTimeOffset;        /* presentation time offset of the current segment */
+};
+
+/* MPD file parsing */
+gboolean gst_mpdparser_get_mpd_root_node (GstMPDRootNode ** mpd_root_node, const gchar * data, gint size);
+GstMPDSegmentListNode * gst_mpdparser_get_external_segment_list (const gchar * data, gint size, GstMPDSegmentListNode * parent);
+GList * gst_mpdparser_get_external_periods (const gchar * data, gint size);
+GList * gst_mpdparser_get_external_adaptation_sets (const gchar * data, gint size, GstMPDPeriodNode* period);
+
+/* navigation functions */
+GstStreamMimeType gst_mpdparser_representation_get_mimetype (GstMPDAdaptationSetNode * adapt_set, GstMPDRepresentationNode * rep);
+
+/* Memory management */
+void gst_mpdparser_free_stream_period (GstStreamPeriod * stream_period);
+void gst_mpdparser_free_media_segment (GstMediaSegment * media_segment);
+void gst_mpdparser_free_active_stream (GstActiveStream * active_stream);
+void gst_mpdparser_media_fragment_info_clear (GstMediaFragmentInfo * fragment);
+/* Active stream methods*/
+void gst_mpdparser_init_active_stream_segments (GstActiveStream * stream);
+gchar *gst_mpdparser_get_mediaURL (GstActiveStream * stream, GstMPDSegmentURLNode * segmentURL);
+const gchar *gst_mpdparser_get_initializationURL (GstActiveStream * stream, GstMPDURLTypeNode * InitializationURL);
+gchar *gst_mpdparser_build_URL_from_template (const gchar * url_template, const gchar * id, guint number, guint bandwidth, guint64 time);
+
+G_END_DECLS
+
+#endif /* __GST_MPDPARSER_H__ */
+
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.c
new file mode 100644 (file)
index 0000000..883c1e7
--- /dev/null
@@ -0,0 +1,214 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdperiodnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDPeriodNode2, gst_mpd_period_node, GST_TYPE_MPD_NODE);
+
+enum
+{
+  PROP_MPD_PERIOD_0,
+  PROP_MPD_PERIOD_ID,
+  PROP_MPD_PERIOD_START,
+  PROP_MPD_PERIOD_DURATION,
+  PROP_MPD_PERIOD_BITSTREAM_SWITCHING,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_period_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDPeriodNode *self = GST_MPD_PERIOD_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_PERIOD_ID:
+      g_free (self->id);
+      self->id = g_value_dup_string (value);
+      break;
+    case PROP_MPD_PERIOD_START:
+      self->start = g_value_get_uint64 (value);
+      break;
+    case PROP_MPD_PERIOD_DURATION:
+      self->duration = g_value_get_uint64 (value);
+      break;
+    case PROP_MPD_PERIOD_BITSTREAM_SWITCHING:
+      self->bitstreamSwitching = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_period_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDPeriodNode *self = GST_MPD_PERIOD_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_PERIOD_ID:
+      g_value_set_string (value, self->id);
+      break;
+    case PROP_MPD_PERIOD_START:
+      g_value_set_uint64 (value, self->start);
+      break;
+    case PROP_MPD_PERIOD_DURATION:
+      g_value_set_uint64 (value, self->duration);
+      break;
+    case PROP_MPD_PERIOD_BITSTREAM_SWITCHING:
+      g_value_set_boolean (value, self->bitstreamSwitching);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_period_node_finalize (GObject * object)
+{
+  GstMPDPeriodNode *self = GST_MPD_PERIOD_NODE (object);
+
+  if (self->id)
+    xmlFree (self->id);
+  gst_mpd_segment_base_node_free (self->SegmentBase);
+  gst_mpd_segment_list_node_free (self->SegmentList);
+  gst_mpd_segment_template_node_free (self->SegmentTemplate);
+  g_list_free_full (self->AdaptationSets,
+      (GDestroyNotify) gst_mpd_adaptation_set_node_free);
+  g_list_free_full (self->Subsets, (GDestroyNotify) gst_mpd_subset_node_free);
+  g_list_free_full (self->BaseURLs, (GDestroyNotify) gst_mpd_baseurl_node_free);
+  if (self->xlink_href)
+    xmlFree (self->xlink_href);
+
+  G_OBJECT_CLASS (gst_mpd_period_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_period_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr period_xml_node = NULL;
+  GstMPDPeriodNode *self = GST_MPD_PERIOD_NODE (node);
+
+  period_xml_node = xmlNewNode (NULL, (xmlChar *) "Period");
+
+  if (self->id)
+    gst_xml_helper_set_prop_string (period_xml_node, "id", self->id);
+
+  gst_xml_helper_set_prop_duration (period_xml_node, "start", self->start);
+  gst_xml_helper_set_prop_duration (period_xml_node, "duration",
+      self->duration);
+  gst_xml_helper_set_prop_boolean (period_xml_node, "bitstreamSwitching",
+      self->bitstreamSwitching);
+
+  if (self->SegmentBase)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->SegmentBase),
+        period_xml_node);
+
+  if (self->SegmentList)
+    gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+        (self->SegmentList), period_xml_node);
+
+  if (self->SegmentTemplate)
+    gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+        (self->SegmentTemplate), period_xml_node);
+
+  g_list_foreach (self->AdaptationSets,
+      gst_mpd_representation_base_node_get_list_item, period_xml_node);
+  g_list_foreach (self->Subsets, gst_mpd_node_get_list_item, period_xml_node);
+  g_list_foreach (self->BaseURLs, gst_mpd_node_get_list_item, period_xml_node);
+
+
+  return period_xml_node;
+}
+
+static void
+gst_mpd_period_node_class_init (GstMPDPeriodNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_period_node_finalize;
+  object_class->set_property = gst_mpd_period_node_set_property;
+  object_class->get_property = gst_mpd_period_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_period_get_xml_node;
+
+  g_object_class_install_property (object_class, PROP_MPD_PERIOD_ID,
+      g_param_spec_string ("id", "id",
+          "unique id for period", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (object_class, PROP_MPD_PERIOD_START,
+      g_param_spec_uint64 ("start", "Period start",
+          "Period start",
+          0, G_MAXUINT64, 0,
+          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+  g_object_class_install_property (object_class, PROP_MPD_PERIOD_DURATION,
+      g_param_spec_uint64 ("duration", "period duration",
+          "Period duration",
+          0, G_MAXUINT64, 0,
+          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_PERIOD_BITSTREAM_SWITCHING,
+      g_param_spec_boolean ("bitstream-switching", "Bitstream switching",
+          "Bitstream switching", FALSE,
+          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+}
+
+static void
+gst_mpd_period_node_init (GstMPDPeriodNode * self)
+{
+  self->id = NULL;
+  self->start = 0;              /* [ms] */
+  self->duration = 0;           /* [ms] */
+  self->bitstreamSwitching = 0;
+  self->SegmentBase = NULL;
+  self->SegmentList = NULL;
+  self->SegmentTemplate = NULL;
+  self->AdaptationSets = NULL;
+  self->Subsets = NULL;
+  self->BaseURLs = NULL;
+  self->xlink_href = NULL;
+  self->actuate = 0;
+}
+
+GstMPDPeriodNode *
+gst_mpd_period_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_PERIOD_NODE, NULL);
+}
+
+void
+gst_mpd_period_node_free (GstMPDPeriodNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdperiodnode.h
new file mode 100644 (file)
index 0000000..a33da11
--- /dev/null
@@ -0,0 +1,68 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDPERIODNODE_H__
+#define __GSTMPDPERIODNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdsegmentlistnode.h"
+#include "gstmpdsegmenttemplatenode.h"
+
+G_BEGIN_DECLS
+
+struct _GstSegmentTemplateNode;
+
+#define GST_TYPE_MPD_PERIOD_NODE gst_mpd_period_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDPeriodNode2, gst_mpd_period_node, GST, MPD_PERIOD_NODE, GstMPDNode)
+
+typedef GstMPDPeriodNode2 GstMPDPeriodNode;
+typedef GstMPDPeriodNode2Class GstMPDPeriodNodeClass;
+
+struct _GstMPDPeriodNode2
+{
+  GstObject parent_instance;
+  gchar *id;
+  guint64 start;                     /* [ms] */
+  guint64 duration;                  /* [ms] */
+  gboolean bitstreamSwitching;
+  /* SegmentBase node */
+  GstMPDSegmentBaseNode *SegmentBase;
+  /* SegmentList node */
+  GstMPDSegmentListNode *SegmentList;
+  /* SegmentTemplate node */
+  GstMPDSegmentTemplateNode *SegmentTemplate;
+  /* list of Adaptation Set nodes */
+  GList *AdaptationSets;
+  /* list of Subset nodes */
+  GList *Subsets;
+  /* list of BaseURL nodes */
+  GList *BaseURLs;
+
+  gchar *xlink_href;
+  int actuate;
+};
+
+GstMPDPeriodNode * gst_mpd_period_node_new (void);
+void gst_mpd_period_node_free (GstMPDPeriodNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDPERIODNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.c
new file mode 100644 (file)
index 0000000..999872b
--- /dev/null
@@ -0,0 +1,125 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdprograminformationnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDProgramInformationNode2, gst_mpd_program_information_node,
+    GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_program_information_node_finalize (GObject * object)
+{
+  GstMPDProgramInformationNode *self =
+      GST_MPD_PROGRAM_INFORMATION_NODE (object);
+
+  if (self->lang)
+    xmlFree (self->lang);
+  if (self->moreInformationURL)
+    xmlFree (self->moreInformationURL);
+  if (self->Title)
+    xmlFree (self->Title);
+  if (self->Source)
+    xmlFree (self->Source);
+  if (self->Copyright)
+    xmlFree (self->Copyright);
+
+  G_OBJECT_CLASS (gst_mpd_program_information_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_program_information_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr program_info_xml_node = NULL;
+  xmlNodePtr child_node = NULL;
+  GstMPDProgramInformationNode *self = GST_MPD_PROGRAM_INFORMATION_NODE (node);
+
+  program_info_xml_node = xmlNewNode (NULL, (xmlChar *) "ProgramInformation");
+
+  if (self->lang)
+    gst_xml_helper_set_prop_string (program_info_xml_node, "lang", self->lang);
+
+  if (self->moreInformationURL)
+    gst_xml_helper_set_prop_string (program_info_xml_node, "moreInformationURL",
+        self->moreInformationURL);
+
+  if (self->Title) {
+    child_node = xmlNewNode (NULL, (xmlChar *) "Title");
+    gst_xml_helper_set_content (child_node, self->Title);
+    xmlAddChild (program_info_xml_node, child_node);
+  }
+
+  if (self->Source) {
+    child_node = xmlNewNode (NULL, (xmlChar *) "Source");
+    gst_xml_helper_set_content (child_node, self->Source);
+    xmlAddChild (program_info_xml_node, child_node);
+  }
+
+  if (self->Copyright) {
+    child_node = xmlNewNode (NULL, (xmlChar *) "Copyright");
+    gst_xml_helper_set_content (child_node, self->Copyright);
+    xmlAddChild (program_info_xml_node, child_node);
+  }
+
+  return program_info_xml_node;
+}
+
+static void
+gst_mpd_program_information_node_class_init (GstMPDProgramInformationNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_program_information_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_program_information_get_xml_node;
+}
+
+static void
+gst_mpd_program_information_node_init (GstMPDProgramInformationNode * self)
+{
+  self->lang = NULL;
+  self->moreInformationURL = NULL;
+  self->Title = NULL;
+  self->Source = NULL;
+  self->Copyright = NULL;
+}
+
+GstMPDProgramInformationNode *
+gst_mpd_program_information_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_PROGRAM_INFORMATION_NODE, NULL);
+}
+
+void
+gst_mpd_program_information_node_free (GstMPDProgramInformationNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdprograminformationnode.h
new file mode 100644 (file)
index 0000000..a6808da
--- /dev/null
@@ -0,0 +1,51 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDPROGRAMINFORMATIONNODE_H__
+#define __GSTMPDPROGRAMINFORMATIONNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_PROGRAM_INFORMATION_NODE gst_mpd_program_information_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDProgramInformationNode2, gst_mpd_program_information_node, GST, MPD_PROGRAM_INFORMATION_NODE, GstMPDNode)
+
+typedef GstMPDProgramInformationNode2 GstMPDProgramInformationNode;
+typedef GstMPDProgramInformationNode2Class GstMPDProgramInformationNodeClass;
+
+struct _GstMPDProgramInformationNode2
+{
+  GstObject parent_instance;
+  gchar *lang;                      /* LangVectorType RFC 5646 */
+  gchar *moreInformationURL;
+  /* children nodes */
+  gchar *Title;
+  gchar *Source;
+  gchar *Copyright;
+};
+
+GstMPDProgramInformationNode * gst_mpd_program_information_node_new (void);
+void gst_mpd_program_information_node_free (GstMPDProgramInformationNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDPROGRAMINFORMATIONNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.c
new file mode 100644 (file)
index 0000000..fc08c58
--- /dev/null
@@ -0,0 +1,64 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdreportingnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDReportingNode2, gst_mpd_reporting_node, GST_TYPE_MPD_NODE);
+
+/* Base class */
+
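+/* Only the bare <Reporting> element is produced here; no attributes or
+ * child nodes are serialised for it. */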
+static xmlNodePtr
+gst_mpd_reporting_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr reporting_xml_node = NULL;
+
+  reporting_xml_node = xmlNewNode (NULL, (xmlChar *) "Reporting");
+
+  return reporting_xml_node;
+}
+
+static void
+gst_mpd_reporting_node_class_init (GstMPDReportingNodeClass * klass)
+{
+  GstMPDNodeClass *m_klass;
+
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  m_klass->get_xml_node = gst_mpd_reporting_get_xml_node;
+}
+
+static void
+gst_mpd_reporting_node_init (GstMPDReportingNode * self)
+{
+}
+
+GstMPDReportingNode *
+gst_mpd_reporting_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_REPORTING_NODE, NULL);
+}
+
+void
+gst_mpd_reporting_node_free (GstMPDReportingNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdreportingnode.h
new file mode 100644 (file)
index 0000000..80e6430
--- /dev/null
@@ -0,0 +1,45 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDREPORTINGNODE_H__
+#define __GSTMPDREPORTINGNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_REPORTING_NODE gst_mpd_reporting_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDReportingNode2, gst_mpd_reporting_node, GST, MPD_REPORTING_NODE, GstMPDNode)
+
+typedef GstMPDReportingNode2 GstMPDReportingNode;
+typedef GstMPDReportingNode2Class GstMPDReportingNodeClass;
+
+struct _GstMPDReportingNode2
+{
+  GstObject     parent_instance;
+};
+
+GstMPDReportingNode * gst_mpd_reporting_node_new (void);
+void gst_mpd_reporting_node_free (GstMPDReportingNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDREPORTINGNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.c
new file mode 100644 (file)
index 0000000..1f9cb5e
--- /dev/null
@@ -0,0 +1,351 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdrepresentationbasenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDRepresentationBaseNode2, gst_mpd_representation_base_node,
+    GST_TYPE_MPD_NODE);
+
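+/* Property ids start at 100 to keep them clearly apart from the ids used by
+ * the classes deriving from this base node (e.g. Representation), which
+ * install their own properties. */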
+enum
+{
+  PROP_MPD_REPRESENTATION_BASE_0 = 100,
+  PROP_MPD_REPRESENTATION_BASE_PROFILES,
+  PROP_MPD_REPRESENTATION_BASE_WIDTH,
+  PROP_MPD_REPRESENTATION_BASE_HEIGHT,
+  PROP_MPD_REPRESENTATION_BASE_SAR,
+  PROP_MPD_REPRESENTATION_BASE_MIN_FRAME_RATE,
+  PROP_MPD_REPRESENTATION_BASE_MAX_FRAME_RATE,
+  PROP_MPD_REPRESENTATION_BASE_FRAME_RATE,
+  PROP_MPD_REPRESENTATION_BASE_AUDIO_SAMPLING_RATE,
+  PROP_MPD_REPRESENTATION_BASE_MIMETYPE,
+  PROP_MPD_REPRESENTATION_BASE_SEGMENT_PROFILES,
+  PROP_MPD_REPRESENTATION_BASE_CODECS,
+  PROP_MPD_REPRESENTATION_BASE_MAX_SAP_PERIOD,
+  PROP_MPD_REPRESENTATION_BASE_START_WITH_SAP,
+  PROP_MPD_REPRESENTATION_BASE_MAX_PLAYOUT_RATE,
+  PROP_MPD_REPRESENTATION_BASE_CODING_DEPENDENCY,
+  PROP_MPD_REPRESENTATION_BASE_SCAN_TYPE,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_representation_base_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDRepresentationBaseNode *self =
+      GST_MPD_REPRESENTATION_BASE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_REPRESENTATION_BASE_PROFILES:
+      g_free (self->profiles);
+      self->profiles = g_value_dup_string (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_WIDTH:
+      self->width = g_value_get_uint (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_HEIGHT:
+      self->height = g_value_get_uint (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SAR:
+      g_slice_free (GstXMLRatio, self->sar);
+      self->sar = gst_xml_helper_clone_ratio (g_value_get_pointer (value));
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MIN_FRAME_RATE:
+      g_slice_free (GstXMLFrameRate, self->minFrameRate);
+      self->minFrameRate =
+          gst_xml_helper_clone_frame_rate (g_value_get_pointer (value));
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_FRAME_RATE:
+      g_slice_free (GstXMLFrameRate, self->maxFrameRate);
+      self->maxFrameRate =
+          gst_xml_helper_clone_frame_rate (g_value_get_pointer (value));
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_FRAME_RATE:
+      g_slice_free (GstXMLFrameRate, self->frameRate);
+      self->frameRate =
+          gst_xml_helper_clone_frame_rate (g_value_get_pointer (value));
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_AUDIO_SAMPLING_RATE:
+      g_free (self->audioSamplingRate);
+      self->audioSamplingRate =
+          g_strdup_printf ("%u", g_value_get_uint (value));
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MIMETYPE:
+      g_free (self->mimeType);
+      self->mimeType = g_value_dup_string (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SEGMENT_PROFILES:
+      g_free (self->segmentProfiles);
+      self->segmentProfiles = g_value_dup_string (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_CODECS:
+      g_free (self->codecs);
+      self->codecs = g_value_dup_string (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_SAP_PERIOD:
+      self->maximumSAPPeriod = g_value_get_double (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_START_WITH_SAP:
+      self->startWithSAP = g_value_get_int (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_PLAYOUT_RATE:
+      self->maxPlayoutRate = g_value_get_double (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_CODING_DEPENDENCY:
+      self->codingDependency = g_value_get_boolean (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SCAN_TYPE:
+      g_free (self->scanType);
+      self->scanType = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_representation_base_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDRepresentationBaseNode *self =
+      GST_MPD_REPRESENTATION_BASE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_REPRESENTATION_BASE_PROFILES:
+      g_value_set_string (value, self->profiles);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_WIDTH:
+      g_value_set_uint (value, self->width);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_HEIGHT:
+      g_value_set_uint (value, self->height);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SAR:
+      g_value_set_pointer (value, self->sar);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MIN_FRAME_RATE:
+      g_value_set_pointer (value, self->minFrameRate);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_FRAME_RATE:
+      g_value_set_pointer (value, self->maxFrameRate);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_FRAME_RATE:
+      g_value_set_pointer (value, self->frameRate);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_AUDIO_SAMPLING_RATE:
+      g_value_set_uint (value,
+          self->audioSamplingRate ? atoi (self->audioSamplingRate) : 0);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MIMETYPE:
+      g_value_set_string (value, self->mimeType);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SEGMENT_PROFILES:
+      g_value_set_string (value, self->segmentProfiles);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_CODECS:
+      g_value_set_string (value, self->codecs);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_SAP_PERIOD:
+      g_value_set_double (value, self->maximumSAPPeriod);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_START_WITH_SAP:
+      g_value_set_int (value, self->startWithSAP);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_MAX_PLAYOUT_RATE:
+      g_value_set_double (value, self->maxPlayoutRate);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_CODING_DEPENDENCY:
+      g_value_set_boolean (value, self->codingDependency);
+      break;
+    case PROP_MPD_REPRESENTATION_BASE_SCAN_TYPE:
+      g_value_set_string (value, self->scanType);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_representation_base_node_finalize (GObject * object)
+{
+  GstMPDRepresentationBaseNode *self =
+      GST_MPD_REPRESENTATION_BASE_NODE (object);
+
+  if (self->profiles)
+    xmlFree (self->profiles);
+  g_slice_free (GstXMLRatio, self->sar);
+  g_slice_free (GstXMLFrameRate, self->frameRate);
+  g_slice_free (GstXMLFrameRate, self->minFrameRate);
+  g_slice_free (GstXMLFrameRate, self->maxFrameRate);
+  if (self->audioSamplingRate)
+    xmlFree (self->audioSamplingRate);
+  if (self->mimeType)
+    xmlFree (self->mimeType);
+  if (self->segmentProfiles)
+    xmlFree (self->segmentProfiles);
+  if (self->codecs)
+    xmlFree (self->codecs);
+  if (self->scanType)
+    xmlFree (self->scanType);
+  g_list_free_full (self->FramePacking,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->AudioChannelConfiguration,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+  g_list_free_full (self->ContentProtection,
+      (GDestroyNotify) gst_mpd_descriptor_type_node_free);
+
+  if (self->caps)
+    gst_caps_unref (self->caps);
+
+  G_OBJECT_CLASS (gst_mpd_representation_base_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static void
+gst_mpd_representation_base_get_xml_node (GstMPDNode * node,
+    xmlNodePtr representation_base_node)
+{
+  GstMPDRepresentationBaseNode *self = GST_MPD_REPRESENTATION_BASE_NODE (node);
+
+  if (self->profiles)
+    gst_xml_helper_set_prop_string (representation_base_node, "profiles",
+        self->profiles);
+  if (self->width)
+    gst_xml_helper_set_prop_uint (representation_base_node, "width",
+        self->width);
+  if (self->height)
+    gst_xml_helper_set_prop_uint (representation_base_node, "height",
+        self->height);
+
+  gst_xml_helper_set_prop_ratio (representation_base_node, "sar", self->sar);
+  gst_xml_helper_set_prop_framerate (representation_base_node, "minFrameRate",
+      self->minFrameRate);
+  gst_xml_helper_set_prop_framerate (representation_base_node, "maxFrameRate",
+      self->maxFrameRate);
+  gst_xml_helper_set_prop_framerate (representation_base_node, "frameRate",
+      self->frameRate);
+
+  gst_xml_helper_set_prop_string (representation_base_node,
+      "audioSamplingRate", self->audioSamplingRate);
+  gst_xml_helper_set_prop_string (representation_base_node, "mimeType",
+      self->mimeType);
+  gst_xml_helper_set_prop_string (representation_base_node, "segmentProfiles",
+      self->segmentProfiles);
+  gst_xml_helper_set_prop_string (representation_base_node, "codecs",
+      self->codecs);
+  if (self->maximumSAPPeriod)
+    gst_xml_helper_set_prop_double (representation_base_node,
+        "maximumSAPPeriod", self->maximumSAPPeriod);
+  if (self->startWithSAP)
+    gst_xml_helper_set_prop_int (representation_base_node, "startWithSAP",
+        self->startWithSAP);
+  if (self->maxPlayoutRate)
+    gst_xml_helper_set_prop_double (representation_base_node, "maxPlayoutRate",
+        self->maxPlayoutRate);
+  if (self->codingDependency)
+    gst_xml_helper_set_prop_boolean (representation_base_node,
+        "codingDependency", self->codingDependency);
+
+  gst_xml_helper_set_prop_string (representation_base_node, "scanType",
+      self->scanType);
+
+  g_list_foreach (self->FramePacking,
+      gst_mpd_node_get_list_item, representation_base_node);
+  g_list_foreach (self->AudioChannelConfiguration,
+      gst_mpd_node_get_list_item, representation_base_node);
+  g_list_foreach (self->ContentProtection,
+      gst_mpd_node_get_list_item, representation_base_node);
+}
+
+static void
+gst_mpd_representation_base_node_class_init (GstMPDRepresentationBaseNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+
+  object_class = G_OBJECT_CLASS (klass);
+
+  object_class->finalize = gst_mpd_representation_base_node_finalize;
+  object_class->set_property = gst_mpd_representation_base_node_set_property;
+  object_class->get_property = gst_mpd_representation_base_node_get_property;
+
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BASE_WIDTH, g_param_spec_uint ("width",
+          "width", "representation width", 0, G_MAXUINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BASE_HEIGHT, g_param_spec_uint ("height",
+          "height", "representation height", 0, G_MAXUINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BASE_MIMETYPE, g_param_spec_string ("mime-type",
+          "mimetype", "representation mimetype", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BASE_CODECS, g_param_spec_string ("codecs",
+          "codecs", "representation codec", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BASE_AUDIO_SAMPLING_RATE,
+      g_param_spec_uint ("audio-sampling-rate", "audio sampling rate",
+          "representation audio sampling rate", 0, G_MAXUINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_representation_base_node_init (GstMPDRepresentationBaseNode * self)
+{
+  self->profiles = NULL;
+  self->width = 0;
+  self->height = 0;
+  self->sar = NULL;
+  self->minFrameRate = NULL;
+  self->maxFrameRate = NULL;
+  self->frameRate = NULL;
+  self->audioSamplingRate = NULL;
+  self->mimeType = NULL;
+  self->segmentProfiles = NULL;
+  self->codecs = NULL;
+  self->maximumSAPPeriod = 0;
+  self->startWithSAP = GST_SAP_TYPE_0;
+  self->maxPlayoutRate = 0.0;
+  self->codingDependency = FALSE;
+  self->scanType = NULL;
+  self->FramePacking = NULL;
+  self->AudioChannelConfiguration = NULL;
+  self->ContentProtection = NULL;
+}
+
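+/* GList foreach helper: serialises one RepresentationBase-derived node
+ * (e.g. an AdaptationSet or SubRepresentation), adds the common
+ * RepresentationBase attributes to the resulting element and attaches it to
+ * the parent XML node passed in @user_data. */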
+void
+gst_mpd_representation_base_node_get_list_item (gpointer data,
+    gpointer user_data)
+{
+  GstMPDNode *node = (GstMPDNode *) data;
+  xmlNodePtr parent_xml_node = (xmlNodePtr) user_data;
+  xmlNodePtr new_xml_node = gst_mpd_node_get_xml_pointer (node);
+
+  gst_mpd_representation_base_get_xml_node (node, new_xml_node);
+  xmlAddChild (parent_xml_node, new_xml_node);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.h
new file mode 100644 (file)
index 0000000..c090ca3
--- /dev/null
@@ -0,0 +1,68 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDREPRESENTATIONBASENODE_H__
+#define __GSTMPDREPRESENTATIONBASENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_REPRESENTATION_BASE_NODE gst_mpd_representation_base_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDRepresentationBaseNode2, gst_mpd_representation_base_node, GST, MPD_REPRESENTATION_BASE_NODE, GstMPDNode)
+
+typedef GstMPDRepresentationBaseNode2 GstMPDRepresentationBaseNode;
+typedef GstMPDRepresentationBaseNode2Class GstMPDRepresentationBaseNodeClass;
+
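+/* Subclasses declared with G_DECLARE_FINAL_TYPE chain their g_autoptr cleanup
+ * to the parent type name as written (the un-suffixed alias), so map it to
+ * the symbol generated for the 2-suffixed type. */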
+#define glib_autoptr_clear_GstMPDRepresentationBaseNode glib_autoptr_clear_GstMPDRepresentationBaseNode2
+
+struct _GstMPDRepresentationBaseNode2
+{
+  GstObject     base;
+  gchar *profiles;
+  guint width;
+  guint height;
+  GstXMLRatio *sar;
+  GstXMLFrameRate *minFrameRate;
+  GstXMLFrameRate *maxFrameRate;
+  GstXMLFrameRate *frameRate;
+  gchar *audioSamplingRate;
+  gchar *mimeType;
+  gchar *segmentProfiles;
+  gchar *codecs;
+  GstCaps *caps;
+  gdouble maximumSAPPeriod;
+  GstMPDSAPType startWithSAP;
+  gdouble maxPlayoutRate;
+  gboolean codingDependency;
+  gchar *scanType;
+  /* list of FramePacking DescriptorType nodes */
+  GList *FramePacking;
+  /* list of AudioChannelConfiguration DescriptorType nodes */
+  GList *AudioChannelConfiguration;
+  /* list of ContentProtection DescriptorType nodes */
+  GList *ContentProtection;
+};
+
+
+void gst_mpd_representation_base_node_get_list_item (gpointer data, gpointer user_data);
+
+G_END_DECLS
+#endif /* __GSTMPDREPRESENTATIONBASENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.c
new file mode 100644 (file)
index 0000000..69dab28
--- /dev/null
@@ -0,0 +1,198 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdrepresentationnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDRepresentationNode2, gst_mpd_representation_node,
+    GST_TYPE_MPD_REPRESENTATION_BASE_NODE);
+
+enum
+{
+  PROP_MPD_REPRESENTATION_0,
+  PROP_MPD_REPRESENTATION_ID,
+  PROP_MPD_REPRESENTATION_BANDWIDTH,
+  PROP_MPD_REPRESENTATION_QUALITY_RANKING,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_representation_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDRepresentationNode *self = GST_MPD_REPRESENTATION_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_REPRESENTATION_ID:
+      g_free (self->id);
+      self->id = g_value_dup_string (value);
+      break;
+    case PROP_MPD_REPRESENTATION_BANDWIDTH:
+      self->bandwidth = g_value_get_uint (value);
+      break;
+    case PROP_MPD_REPRESENTATION_QUALITY_RANKING:
+      self->qualityRanking = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_representation_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDRepresentationNode *self = GST_MPD_REPRESENTATION_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_REPRESENTATION_ID:
+      g_value_set_string (value, self->id);
+      break;
+    case PROP_MPD_REPRESENTATION_BANDWIDTH:
+      g_value_set_uint (value, self->bandwidth);
+      break;
+    case PROP_MPD_REPRESENTATION_QUALITY_RANKING:
+      g_value_set_uint (value, self->qualityRanking);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_representation_node_finalize (GObject * object)
+{
+  GstMPDRepresentationNode *self = GST_MPD_REPRESENTATION_NODE (object);
+
+  if (self->id)
+    xmlFree (self->id);
+  g_strfreev (self->dependencyId);
+  g_strfreev (self->mediaStreamStructureId);
+  g_list_free_full (self->SubRepresentations,
+      (GDestroyNotify) gst_mpd_sub_representation_node_free);
+  gst_mpd_segment_base_node_free (self->SegmentBase);
+  gst_mpd_segment_template_node_free (self->SegmentTemplate);
+  gst_mpd_segment_list_node_free (self->SegmentList);
+  g_list_free_full (self->BaseURLs, (GDestroyNotify) gst_mpd_baseurl_node_free);
+
+  G_OBJECT_CLASS (gst_mpd_representation_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_representation_get_xml_node (GstMPDNode * node)
+{
+  gchar *value;
+  xmlNodePtr representation_xml_node = NULL;
+  GstMPDRepresentationNode *self = GST_MPD_REPRESENTATION_NODE (node);
+
+  representation_xml_node = xmlNewNode (NULL, (xmlChar *) "Representation");
+
+  gst_xml_helper_set_prop_string (representation_xml_node, "id", self->id);
+  gst_xml_helper_set_prop_uint (representation_xml_node, "bandwidth",
+      self->bandwidth);
+  if (self->qualityRanking)
+    gst_xml_helper_set_prop_uint (representation_xml_node, "qualityRanking",
+        self->qualityRanking);
+
+
+  if (self->dependencyId) {
+    value = g_strjoinv (" ", self->dependencyId);
+    gst_xml_helper_set_prop_string (representation_xml_node, "dependencyId",
+        value);
+    g_free (value);
+  }
+  if (self->mediaStreamStructureId) {
+    value = g_strjoinv (" ", self->mediaStreamStructureId);
+    gst_xml_helper_set_prop_string (representation_xml_node,
+        "mediaStreamStructureId", value);
+    g_free (value);
+  }
+
+  g_list_foreach (self->BaseURLs, gst_mpd_node_get_list_item,
+      representation_xml_node);
+  g_list_foreach (self->SubRepresentations,
+      gst_mpd_representation_base_node_get_list_item, representation_xml_node);
+
+  if (self->SegmentBase)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->SegmentBase),
+        representation_xml_node);
+  if (self->SegmentTemplate)
+    gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+        (self->SegmentTemplate), representation_xml_node);
+  if (self->SegmentList)
+    gst_mpd_mult_segment_base_node_add_child_node (GST_MPD_NODE
+        (self->SegmentList), representation_xml_node);
+
+  return representation_xml_node;
+}
+
+static void
+gst_mpd_representation_node_class_init (GstMPDRepresentationNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_representation_node_finalize;
+  object_class->set_property = gst_mpd_representation_node_set_property;
+  object_class->get_property = gst_mpd_representation_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_representation_get_xml_node;
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_BANDWIDTH, g_param_spec_uint ("bandwidth",
+          "bandwidth", "representation bandwidth", 0, G_MAXUINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_REPRESENTATION_QUALITY_RANKING,
+      g_param_spec_uint ("quality-ranking", "quality ranking",
+          "representation quality ranking", 0, G_MAXUINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_representation_node_init (GstMPDRepresentationNode * self)
+{
+  self->id = NULL;
+  self->bandwidth = 0;
+  self->qualityRanking = 0;
+  self->dependencyId = NULL;
+  self->mediaStreamStructureId = NULL;
+  self->BaseURLs = NULL;
+  self->SubRepresentations = NULL;
+  self->SegmentBase = NULL;
+  self->SegmentTemplate = NULL;
+  self->SegmentList = NULL;
+}
+
+GstMPDRepresentationNode *
+gst_mpd_representation_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_REPRESENTATION_NODE, NULL);
+}
+
+void
+gst_mpd_representation_node_free (GstMPDRepresentationNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrepresentationnode.h
new file mode 100644 (file)
index 0000000..3376584
--- /dev/null
@@ -0,0 +1,64 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDREPRESENTATIONNODE_H__
+#define __GSTMPDREPRESENTATIONNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdrepresentationbasenode.h"
+#include "gstmpdsegmentlistnode.h"
+#include "gstmpdsegmenttemplatenode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_REPRESENTATION_NODE gst_mpd_representation_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDRepresentationNode2, gst_mpd_representation_node, GST, MPD_REPRESENTATION_NODE, GstMPDRepresentationBaseNode)
+
+typedef GstMPDRepresentationNode2 GstMPDRepresentationNode;
+typedef GstMPDRepresentationNode2Class GstMPDRepresentationNodeClass;
+
+struct _GstMPDRepresentationNode2
+{
+  GstMPDRepresentationBaseNode parent_instance;
+  gchar *id;
+  guint bandwidth;
+  guint qualityRanking;
+  gchar **dependencyId;              /* StringVectorType */
+  gchar **mediaStreamStructureId;    /* StringVectorType */
+  /* list of BaseURL nodes */
+  GList *BaseURLs;
+  /* list of SubRepresentation nodes */
+  GList *SubRepresentations;
+  /* SegmentBase node */
+  GstMPDSegmentBaseNode *SegmentBase;
+  /* SegmentTemplate node */
+  GstMPDSegmentTemplateNode *SegmentTemplate;
+  /* SegmentList node */
+  GstMPDSegmentListNode *SegmentList;
+};
+
+
+GstMPDRepresentationNode * gst_mpd_representation_node_new (void);
+void gst_mpd_representation_node_free (GstMPDRepresentationNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDREPRESENTATIONNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.c
new file mode 100644 (file)
index 0000000..69ca6b6
--- /dev/null
@@ -0,0 +1,405 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdrootnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDRootNode2, gst_mpd_root_node, GST_TYPE_MPD_NODE);
+
+enum
+{
+  PROP_MPD_ROOT_0,
+  PROP_MPD_ROOT_DEFAULT_NAMESPACE,
+  PROP_MPD_ROOT_NAMESPACE_XSI,
+  PROP_MPD_ROOT_NAMESPACE_EXT,
+  PROP_MPD_ROOT_SCHEMA_LOCATION,
+  PROP_MPD_ROOT_ID,
+  PROP_MPD_ROOT_PROFILES,
+  PROP_MPD_ROOT_TYPE,
+  PROP_MPD_ROOT_PUBLISH_TIME,
+  PROP_MPD_ROOT_AVAILABILTY_START_TIME,
+  PROP_MPD_ROOT_AVAILABILTY_END_TIME,
+  PROP_MPD_ROOT_MEDIA_PRESENTATION_DURATION,
+  PROP_MPD_ROOT_MINIMUM_UPDATE_PERIOD,
+  PROP_MPD_ROOT_MIN_BUFFER_TIME,
+  PROP_MPD_ROOT_TIMESHIFT_BUFFER_DEPTH,
+  PROP_MPD_ROOT_SUGGESTED_PRESENTATION_DELAY,
+  PROP_MPD_ROOT_MAX_SEGMENT_DURATION,
+  PROP_MPD_ROOT_MAX_SUBSEGMENT_DURATION,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_root_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDRootNode *self = GST_MPD_ROOT_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_ROOT_DEFAULT_NAMESPACE:
+      g_free (self->default_namespace);
+      self->default_namespace = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_NAMESPACE_XSI:
+      g_free (self->namespace_xsi);
+      self->namespace_xsi = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_NAMESPACE_EXT:
+      g_free (self->namespace_ext);
+      self->namespace_ext = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_SCHEMA_LOCATION:
+      g_free (self->schemaLocation);
+      self->schemaLocation = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_ID:
+      g_free (self->id);
+      self->id = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_PROFILES:
+      g_free (self->profiles);
+      self->profiles = g_value_dup_string (value);
+      break;
+    case PROP_MPD_ROOT_TYPE:
+      self->type = (GstMPDFileType) g_value_get_int (value);
+      break;
+    case PROP_MPD_ROOT_AVAILABILTY_START_TIME:
+      if (self->availabilityStartTime)
+        gst_date_time_unref (self->availabilityStartTime);
+      self->availabilityStartTime = g_value_dup_boxed (value);
+      break;
+    case PROP_MPD_ROOT_AVAILABILTY_END_TIME:
+      if (self->availabilityEndTime)
+        gst_date_time_unref (self->availabilityEndTime);
+      self->availabilityEndTime = g_value_dup_boxed (value);
+      break;
+    case PROP_MPD_ROOT_PUBLISH_TIME:
+      if (self->publishTime)
+        gst_date_time_unref (self->publishTime);
+      self->publishTime = g_value_dup_boxed (value);
+      break;
+    case PROP_MPD_ROOT_MEDIA_PRESENTATION_DURATION:
+      self->mediaPresentationDuration = g_value_get_uint64 (value);
+      break;
+    case PROP_MPD_ROOT_MINIMUM_UPDATE_PERIOD:
+      self->minimumUpdatePeriod = g_value_get_uint64 (value);
+      break;
+    case PROP_MPD_ROOT_MIN_BUFFER_TIME:
+      self->minBufferTime = g_value_get_uint64 (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_root_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDRootNode *self = GST_MPD_ROOT_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_ROOT_DEFAULT_NAMESPACE:
+      g_value_set_string (value, self->default_namespace);
+      break;
+    case PROP_MPD_ROOT_NAMESPACE_XSI:
+      g_value_set_string (value, self->namespace_xsi);
+      break;
+    case PROP_MPD_ROOT_NAMESPACE_EXT:
+      g_value_set_string (value, self->namespace_ext);
+      break;
+    case PROP_MPD_ROOT_SCHEMA_LOCATION:
+      g_value_set_string (value, self->schemaLocation);
+      break;
+    case PROP_MPD_ROOT_ID:
+      g_value_set_string (value, self->id);
+      break;
+    case PROP_MPD_ROOT_PROFILES:
+      g_value_set_string (value, self->profiles);
+      break;
+    case PROP_MPD_ROOT_TYPE:
+      g_value_set_int (value, self->type);
+      break;
+    case PROP_MPD_ROOT_AVAILABILTY_START_TIME:
+      g_value_set_boxed (value, self->availabilityStartTime);
+      break;
+    case PROP_MPD_ROOT_AVAILABILTY_END_TIME:
+      g_value_set_boxed (value, self->availabilityEndTime);
+      break;
+    case PROP_MPD_ROOT_PUBLISH_TIME:
+      g_value_set_boxed (value, self->publishTime);
+      break;
+    case PROP_MPD_ROOT_MEDIA_PRESENTATION_DURATION:
+      g_value_set_uint64 (value, self->mediaPresentationDuration);
+      break;
+    case PROP_MPD_ROOT_MINIMUM_UPDATE_PERIOD:
+      g_value_set_uint64 (value, self->minimumUpdatePeriod);
+      break;
+    case PROP_MPD_ROOT_MIN_BUFFER_TIME:
+      g_value_set_uint64 (value, self->minBufferTime);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_root_node_finalize (GObject * object)
+{
+  GstMPDRootNode *self = GST_MPD_ROOT_NODE (object);
+
+  g_free (self->default_namespace);
+  g_free (self->namespace_xsi);
+  g_free (self->namespace_ext);
+  g_free (self->schemaLocation);
+  g_free (self->id);
+  g_free (self->profiles);
+
+  if (self->availabilityStartTime)
+    gst_date_time_unref (self->availabilityStartTime);
+  if (self->availabilityEndTime)
+    gst_date_time_unref (self->availabilityEndTime);
+  if (self->publishTime)
+    gst_date_time_unref (self->publishTime);
+
+  g_list_free_full (self->ProgramInfos,
+      (GDestroyNotify) gst_mpd_program_information_node_free);
+  g_list_free_full (self->BaseURLs, (GDestroyNotify) gst_mpd_baseurl_node_free);
+  g_list_free_full (self->Locations,
+      (GDestroyNotify) gst_mpd_location_node_free);
+  g_list_free_full (self->Periods, (GDestroyNotify) gst_mpd_period_node_free);
+  g_list_free_full (self->Metrics, (GDestroyNotify) gst_mpd_metrics_node_free);
+  g_list_free_full (self->UTCTimings,
+      (GDestroyNotify) gst_mpd_utctiming_node_free);
+
+
+  G_OBJECT_CLASS (gst_mpd_root_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_root_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr root_xml_node;
+  GstMPDRootNode *self = GST_MPD_ROOT_NODE (node);
+
+  root_xml_node = xmlNewNode (NULL, (xmlChar *) "MPD");
+
+  gst_xml_helper_set_prop_string (root_xml_node, "xmlns",
+      self->default_namespace);
+  gst_xml_helper_set_prop_string (root_xml_node, "profiles", self->profiles);
+  gst_xml_helper_set_prop_string (root_xml_node, "schemaLocation",
+      self->schemaLocation);
+  gst_xml_helper_set_prop_string (root_xml_node, "xmlns:xsi",
+      self->namespace_xsi);
+  gst_xml_helper_set_prop_string (root_xml_node, "xmlns:ext",
+      self->namespace_ext);
+  gst_xml_helper_set_prop_string (root_xml_node, "id", self->id);
+
+  if (self->type == GST_MPD_FILE_TYPE_STATIC)
+    gst_xml_helper_set_prop_string (root_xml_node, "type", (gchar *) "static");
+  else
+    gst_xml_helper_set_prop_string (root_xml_node, "type", (gchar *) "dynamic");
+
+
+  gst_xml_helper_set_prop_date_time (root_xml_node, "availabilityStartTime",
+      self->availabilityStartTime);
+
+  gst_xml_helper_set_prop_date_time (root_xml_node, "availabilityEndTime",
+      self->availabilityEndTime);
+  gst_xml_helper_set_prop_date_time (root_xml_node, "publishTime",
+      self->publishTime);
+
+  if (self->mediaPresentationDuration)
+    gst_xml_helper_set_prop_duration (root_xml_node,
+        "mediaPresentationDuration", self->mediaPresentationDuration);
+  if (self->minimumUpdatePeriod)
+    gst_xml_helper_set_prop_duration (root_xml_node, "minimumUpdatePeriod",
+        self->minimumUpdatePeriod);
+  if (self->minBufferTime)
+    gst_xml_helper_set_prop_duration (root_xml_node, "minBufferTime",
+        self->minBufferTime);
+  if (self->timeShiftBufferDepth)
+    gst_xml_helper_set_prop_duration (root_xml_node, "timeShiftBufferDepth",
+        self->timeShiftBufferDepth);
+  if (self->suggestedPresentationDelay)
+    gst_xml_helper_set_prop_duration (root_xml_node,
+        "suggestedPresentationDelay", self->suggestedPresentationDelay);
+  if (self->maxSegmentDuration)
+    gst_xml_helper_set_prop_duration (root_xml_node, "maxSegmentDuration",
+        self->maxSegmentDuration);
+  if (self->maxSubsegmentDuration)
+    gst_xml_helper_set_prop_duration (root_xml_node, "maxSubsegmentDuration",
+        self->maxSubsegmentDuration);
+
+  g_list_foreach (self->BaseURLs, gst_mpd_node_get_list_item, root_xml_node);
+  g_list_foreach (self->Locations, gst_mpd_node_get_list_item, root_xml_node);
+  g_list_foreach (self->ProgramInfos, gst_mpd_node_get_list_item,
+      root_xml_node);
+  g_list_foreach (self->Periods, gst_mpd_node_get_list_item, root_xml_node);
+  g_list_foreach (self->Metrics, gst_mpd_node_get_list_item, root_xml_node);
+  g_list_foreach (self->UTCTimings, gst_mpd_node_get_list_item, root_xml_node);
+
+  return root_xml_node;
+}
+
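+/* Serialises the whole MPD tree into an in-memory XML document; the caller
+ * receives a newly allocated string in @doc_content and its length in
+ * @doc_size. */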
+static gboolean
+gst_mpd_root_get_xml_buffer (GstMPDNode * node, gchar ** doc_content,
+    gint * doc_size)
+{
+  xmlDocPtr doc;
+  xmlNodePtr root_xml_node;
+  xmlChar *xmlbody;
+
+  doc = xmlNewDoc ((xmlChar *) "1.0");
+  root_xml_node = gst_mpd_root_get_xml_node (node);
+  xmlDocSetRootElement (doc, root_xml_node);
+
+  xmlDocDumpMemory (doc, &xmlbody, doc_size);
+  *doc_content = g_strndup ((gchar *) xmlbody, *doc_size);
+  xmlFree (xmlbody);
+
+  xmlFreeDoc (doc);
+  return TRUE;
+}
+
+static void
+gst_mpd_root_node_class_init (GstMPDRootNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_root_node_finalize;
+  object_class->set_property = gst_mpd_root_node_set_property;
+  object_class->get_property = gst_mpd_root_node_get_property;
+
+  m_klass->get_xml_buffer = gst_mpd_root_get_xml_buffer;
+  m_klass->get_xml_node = gst_mpd_root_get_xml_node;
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_DEFAULT_NAMESPACE, g_param_spec_string ("default-namespace",
+          "default namespace", "default namespace", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_NAMESPACE_XSI,
+      g_param_spec_string ("namespace-xsi", "namespace xsi", "namespace xsi",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_NAMESPACE_EXT,
+      g_param_spec_string ("namespace-ext", "namespace ext", "namespace ext",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_SCHEMA_LOCATION,
+      g_param_spec_string ("schema-location", "schema location",
+          "schema location for period", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_ID,
+      g_param_spec_string ("id", "id", "unique id for period", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_PROFILES,
+      g_param_spec_string ("profiles", "profiles", "profiles", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_MPD_ROOT_TYPE,
+      g_param_spec_int ("type", "MPD type",
+          "MPD type",
+          GST_MPD_FILE_TYPE_STATIC, GST_MPD_FILE_TYPE_DYNAMIC,
+          GST_MPD_FILE_TYPE_STATIC,
+          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_AVAILABILTY_START_TIME,
+      g_param_spec_boxed ("availability-start-time", "Availability start time",
+          "MPD availability start time", GST_TYPE_DATE_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_AVAILABILTY_END_TIME,
+      g_param_spec_boxed ("availability-end-time", "Availability end time",
+          "MPD availability end time", GST_TYPE_DATE_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_PUBLISH_TIME,
+      g_param_spec_boxed ("publish-time", "publish time",
+          "MPD publish time", GST_TYPE_DATE_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_MEDIA_PRESENTATION_DURATION,
+      g_param_spec_uint64 ("media-presentation-duration",
+          "media presentation duration", "media presentation duration", 0,
+          G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_MINIMUM_UPDATE_PERIOD,
+      g_param_spec_uint64 ("minimum-update-period",
+          "minimum update period", "minimum update period", 0,
+          G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_ROOT_MIN_BUFFER_TIME,
+      g_param_spec_uint64 ("min-buffer-time", "mininim buffer time",
+          "mininim buffer time", 0,
+          G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_root_node_init (GstMPDRootNode * self)
+{
+  self->default_namespace = NULL;
+  self->namespace_xsi = NULL;
+  self->namespace_ext = NULL;
+  self->schemaLocation = NULL;
+  self->id = NULL;
+  self->profiles = NULL;
+  self->type = GST_MPD_FILE_TYPE_STATIC;
+  self->availabilityStartTime = NULL;
+  self->availabilityEndTime = NULL;
+  self->publishTime = NULL;
+  self->mediaPresentationDuration = 0;  /* [ms] */
+  self->minimumUpdatePeriod = 0;        /* [ms] */
+  self->minBufferTime = 2000;   /* [ms] */
+  self->timeShiftBufferDepth = 0;       /* [ms] */
+  self->suggestedPresentationDelay = 0; /* [ms] */
+  self->maxSegmentDuration = 0; /* [ms] */
+  self->maxSubsegmentDuration = 0;      /* [ms] */
+  /* list of BaseURL nodes */
+  self->BaseURLs = NULL;
+  /* list of Location nodes */
+  self->Locations = NULL;
+  /* List of ProgramInformation nodes */
+  self->ProgramInfos = NULL;
+  /* list of Periods nodes */
+  self->Periods = NULL;
+  /* list of Metrics nodes */
+  self->Metrics = NULL;
+  /* list of GstUTCTimingNode nodes */
+  self->UTCTimings = NULL;
+}
+
+GstMPDRootNode *
+gst_mpd_root_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_ROOT_NODE, NULL);
+}
+
+void
+gst_mpd_root_node_free (GstMPDRootNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdrootnode.h
new file mode 100644 (file)
index 0000000..db4f710
--- /dev/null
@@ -0,0 +1,73 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDROOTNODE_H__
+#define __GSTMPDROOTNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_ROOT_NODE gst_mpd_root_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDRootNode2, gst_mpd_root_node, GST, MPD_ROOT_NODE, GstMPDNode)
+
+typedef GstMPDRootNode2 GstMPDRootNode;
+typedef GstMPDRootNode2Class GstMPDRootNodeClass;
+
+struct _GstMPDRootNode2
+{
+  GstObject     parent_instance;
+  gchar *default_namespace;
+  gchar *namespace_xsi;
+  gchar *namespace_ext;
+  gchar *schemaLocation;
+  gchar *id;
+  gchar *profiles;
+  GstMPDFileType type;
+  GstDateTime *availabilityStartTime;
+  GstDateTime *availabilityEndTime;
+  GstDateTime *publishTime;
+  guint64 mediaPresentationDuration;  /* [ms] */
+  guint64 minimumUpdatePeriod;        /* [ms] */
+  guint64 minBufferTime;              /* [ms] */
+  guint64 timeShiftBufferDepth;       /* [ms] */
+  guint64 suggestedPresentationDelay; /* [ms] */
+  guint64 maxSegmentDuration;         /* [ms] */
+  guint64 maxSubsegmentDuration;      /* [ms] */
+  /* list of BaseURL nodes */
+  GList *BaseURLs;
+  /* list of Location nodes */
+  GList *Locations;
+  /* List of ProgramInformation nodes */
+  GList *ProgramInfos;
+  /* list of Periods nodes */
+  GList *Periods;
+  /* list of Metrics nodes */
+  GList *Metrics;
+  /* list of GstUTCTimingNode nodes */
+  GList *UTCTimings;
+};
+
+GstMPDRootNode * gst_mpd_root_node_new (void);
+void gst_mpd_root_node_free (GstMPDRootNode* self);
+
+G_END_DECLS
+#endif /* __GSTMPDROOTNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.c
new file mode 100644 (file)
index 0000000..ad20972
--- /dev/null
@@ -0,0 +1,112 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsegmentbasenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSegmentBaseNode2, gst_mpd_segment_base_node,
+    GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_segment_base_node_finalize (GObject * object)
+{
+  GstMPDSegmentBaseNode *self = GST_MPD_SEGMENT_BASE_NODE (object);
+
+  if (self->indexRange)
+    g_slice_free (GstXMLRange, self->indexRange);
+  gst_mpd_url_type_node_free (self->Initialization);
+  gst_mpd_url_type_node_free (self->RepresentationIndex);
+
+  G_OBJECT_CLASS (gst_mpd_segment_base_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_segment_base_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr segment_base_xml_node = NULL;
+  GstMPDSegmentBaseNode *self = GST_MPD_SEGMENT_BASE_NODE (node);
+
+  segment_base_xml_node = xmlNewNode (NULL, (xmlChar *) "SegmentBase");
+
+  if (self->timescale)
+    gst_xml_helper_set_prop_uint (segment_base_xml_node, "timescale",
+        self->timescale);
+  if (self->presentationTimeOffset)
+    gst_xml_helper_set_prop_uint64 (segment_base_xml_node,
+        "presentationTimeOffset", self->presentationTimeOffset);
+  if (self->indexRange) {
+    gst_xml_helper_set_prop_range (segment_base_xml_node, "indexRange",
+        self->indexRange);
+    gst_xml_helper_set_prop_boolean (segment_base_xml_node, "indexRangeExact",
+        self->indexRangeExact);
+  }
+  if (self->Initialization)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->Initialization),
+        segment_base_xml_node);
+  if (self->RepresentationIndex)
+    gst_mpd_node_add_child_node (GST_MPD_NODE (self->RepresentationIndex),
+        segment_base_xml_node);
+
+  return segment_base_xml_node;
+}
+
+static void
+gst_mpd_segment_base_node_class_init (GstMPDSegmentBaseNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_segment_base_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_segment_base_get_xml_node;
+}
+
+static void
+gst_mpd_segment_base_node_init (GstMPDSegmentBaseNode * self)
+{
+  self->timescale = 0;
+  self->presentationTimeOffset = 0;
+  self->indexRange = NULL;
+  self->indexRangeExact = FALSE;
+  /* Initialization node */
+  self->Initialization = NULL;
+  /* RepresentationIndex node */
+  self->RepresentationIndex = NULL;
+}
+
+GstMPDSegmentBaseNode *
+gst_mpd_segment_base_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SEGMENT_BASE_NODE, NULL);
+}
+
+void
+gst_mpd_segment_base_node_free (GstMPDSegmentBaseNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentbasenode.h
new file mode 100644 (file)
index 0000000..7c0e749
--- /dev/null
@@ -0,0 +1,56 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSEGMENTBASENODE_H__
+#define __GSTMPDSEGMENTBASENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+#include "gstmpdurltypenode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SEGMENT_BASE_NODE gst_mpd_segment_base_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSegmentBaseNode2, gst_mpd_segment_base_node, GST, MPD_SEGMENT_BASE_NODE, GstMPDNode)
+
+typedef GstMPDSegmentBaseNode2 GstMPDSegmentBaseNode;
+typedef GstMPDSegmentBaseNode2Class GstMPDSegmentBaseNodeClass;
+
+#define glib_autoptr_clear_GstMPDMultSegmentBaseNode glib_autoptr_clear_GstMPDMultSegmentBaseNode2
+
+struct _GstMPDSegmentBaseNode2
+{
+  GstObject     parent_instance;
+  guint timescale;
+  guint64 presentationTimeOffset;
+  GstXMLRange *indexRange;
+  gboolean indexRangeExact;
+  /* Initialization node */
+  GstMPDURLTypeNode *Initialization;
+  /* RepresentationIndex node */
+  GstMPDURLTypeNode *RepresentationIndex;
+};
+
+GstMPDSegmentBaseNode * gst_mpd_segment_base_node_new (void);
+void gst_mpd_segment_base_node_free (GstMPDSegmentBaseNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSEGMENTBASENODE_H__ */
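A minimal sketch (hypothetical values, not part of the patch) of populating this node type from code; the Initialization child is owned by the SegmentBase node and released in its finalize:

GstMPDSegmentBaseNode *base = gst_mpd_segment_base_node_new ();

base->timescale = 90000;
base->Initialization = gst_mpd_url_type_node_new ("Initialization");
/* sourceURL is released with xmlFree() in the URL node's finalize, so it
 * is duplicated with the libxml2 allocator here */
base->Initialization->sourceURL = xmlMemStrdup ("init.mp4");

gst_mpd_segment_base_node_free (base);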
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.c
new file mode 100644 (file)
index 0000000..b27bf14
--- /dev/null
@@ -0,0 +1,104 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsegmentlistnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSegmentListNode2, gst_mpd_segment_list_node,
+    GST_TYPE_MPD_MULT_SEGMENT_BASE_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_segment_list_node_finalize (GObject * object)
+{
+  GstMPDSegmentListNode *self = GST_MPD_SEGMENT_LIST_NODE (object);
+
+  g_list_free_full (self->SegmentURL,
+      (GDestroyNotify) gst_mpd_segment_url_node_free);
+  if (self->xlink_href)
+    xmlFree (self->xlink_href);
+
+  G_OBJECT_CLASS (gst_mpd_segment_list_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_segment_list_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr segment_list_xml_node = NULL;
+  GstMPDSegmentListNode *self = GST_MPD_SEGMENT_LIST_NODE (node);
+
+  segment_list_xml_node = xmlNewNode (NULL, (xmlChar *) "SegmentList");
+
+  g_list_foreach (self->SegmentURL, gst_mpd_node_get_list_item,
+      segment_list_xml_node);
+
+  if (self->xlink_href)
+    gst_xml_helper_set_prop_string (segment_list_xml_node, "xlink_href",
+        self->xlink_href);
+
+  return segment_list_xml_node;
+}
+
+static void
+gst_mpd_segment_list_node_class_init (GstMPDSegmentListNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_segment_list_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_segment_list_get_xml_node;
+}
+
+static void
+gst_mpd_segment_list_node_init (GstMPDSegmentListNode * self)
+{
+  self->SegmentURL = NULL;
+  self->xlink_href = NULL;
+  self->actuate = GST_MPD_XLINK_ACTUATE_ON_REQUEST;
+}
+
+GstMPDSegmentListNode *
+gst_mpd_segment_list_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SEGMENT_LIST_NODE, NULL);
+}
+
+void
+gst_mpd_segment_list_node_free (GstMPDSegmentListNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+void
+gst_mpd_segment_list_node_add_segment (GstMPDSegmentListNode * self,
+    GstMPDSegmentURLNode * segment_url)
+{
+  g_return_if_fail (self != NULL);
+
+  self->SegmentURL = g_list_append (self->SegmentURL, segment_url);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmentlistnode.h
new file mode 100644 (file)
index 0000000..88784d1
--- /dev/null
@@ -0,0 +1,55 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSEGMENTLISTNODE_H__
+#define __GSTMPDSEGMENTLISTNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdmultsegmentbasenode.h"
+#include "gstmpdsegmenturlnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SEGMENT_LIST_NODE gst_mpd_segment_list_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSegmentListNode2, gst_mpd_segment_list_node, GST, MPD_SEGMENT_LIST_NODE, GstMPDMultSegmentBaseNode)
+
+typedef GstMPDSegmentListNode2 GstMPDSegmentListNode;
+typedef GstMPDSegmentListNode2Class GstMPDSegmentListNodeClass;
+
+struct _GstMPDSegmentListNode2
+{
+  GstMPDMultSegmentBaseNode parent_instance;
+  /* extension */
+  /* list of SegmentURL nodes */
+  GList *SegmentURL;
+
+  gchar *xlink_href;
+  GstMPDXLinkActuate actuate;
+};
+
+GstMPDSegmentListNode * gst_mpd_segment_list_node_new (void);
+void gst_mpd_segment_list_node_free (GstMPDSegmentListNode* self);
+
+void gst_mpd_segment_list_node_add_segment(GstMPDSegmentListNode * self, GstMPDSegmentURLNode * segment_url);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSEGMENTLISTNODE_H__ */
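As a hedged usage sketch (not part of the patch), SegmentURL entries are appended with the helper declared above; the list node then owns them and frees them in its finalize:

GstMPDSegmentListNode *list = gst_mpd_segment_list_node_new ();
GstMPDSegmentURLNode *url = gst_mpd_segment_url_node_new ();

/* "media" is a regular GObject string property on the SegmentURL node */
g_object_set (url, "media", "segment-000001.m4s", NULL);

/* ownership of the URL node passes to the list; it is freed via
 * gst_mpd_segment_url_node_free() when the list node is finalized */
gst_mpd_segment_list_node_add_segment (list, url);

gst_mpd_segment_list_node_free (list);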
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.c
new file mode 100644 (file)
index 0000000..44aa0fe
--- /dev/null
@@ -0,0 +1,186 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsegmenttemplatenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSegmentTemplateNode2, gst_mpd_segment_template_node,
+    GST_TYPE_MPD_MULT_SEGMENT_BASE_NODE);
+
+enum
+{
+  PROP_MPD_SEGMENT_TEMPLATE_0,
+  PROP_MPD_SEGMENT_TEMPLATE_MEDIA,
+  PROP_MPD_SEGMENT_TEMPLATE_INDEX,
+  PROP_MPD_SEGMENT_TEMPLATE_INITIALIZATION,
+  PROP_MPD_SEGMENT_TEMPLATE_BITSTREAM_SWITCHING,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_segment_template_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDSegmentTemplateNode *self = GST_MPD_SEGMENT_TEMPLATE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_SEGMENT_TEMPLATE_MEDIA:
+      self->media = g_value_dup_string (value);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_INDEX:
+      self->index = g_value_dup_string (value);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_INITIALIZATION:
+      self->initialization = g_value_dup_string (value);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_BITSTREAM_SWITCHING:
+      self->bitstreamSwitching = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_segment_template_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDSegmentTemplateNode *self = GST_MPD_SEGMENT_TEMPLATE_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_SEGMENT_TEMPLATE_MEDIA:
+      g_value_set_string (value, self->media);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_INDEX:
+      g_value_set_string (value, self->index);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_INITIALIZATION:
+      g_value_set_string (value, self->initialization);
+      break;
+    case PROP_MPD_SEGMENT_TEMPLATE_BITSTREAM_SWITCHING:
+      g_value_set_string (value, self->bitstreamSwitching);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_segment_template_node_finalize (GObject * object)
+{
+  GstMPDSegmentTemplateNode *self = GST_MPD_SEGMENT_TEMPLATE_NODE (object);
+
+  if (self->media)
+    xmlFree (self->media);
+  if (self->index)
+    xmlFree (self->index);
+  if (self->initialization)
+    xmlFree (self->initialization);
+  if (self->bitstreamSwitching)
+    xmlFree (self->bitstreamSwitching);
+
+  G_OBJECT_CLASS (gst_mpd_segment_template_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_segment_template_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr segment_template_xml_node = NULL;
+  GstMPDSegmentTemplateNode *self = GST_MPD_SEGMENT_TEMPLATE_NODE (node);
+
+  segment_template_xml_node = xmlNewNode (NULL, (xmlChar *) "SegmentTemplate");
+
+  if (self->media)
+    gst_xml_helper_set_prop_string (segment_template_xml_node, "media",
+        self->media);
+
+  if (self->index)
+    gst_xml_helper_set_prop_string (segment_template_xml_node, "index",
+        self->index);
+
+  if (self->initialization)
+    gst_xml_helper_set_prop_string (segment_template_xml_node, "initialization",
+        self->initialization);
+
+  if (self->bitstreamSwitching)
+    gst_xml_helper_set_prop_string (segment_template_xml_node,
+        "bitstreamSwitching", self->bitstreamSwitching);
+
+  return segment_template_xml_node;
+}
+
+static void
+gst_mpd_segment_template_node_class_init (GstMPDSegmentTemplateNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_segment_template_node_finalize;
+  object_class->set_property = gst_mpd_segment_template_node_set_property;
+  object_class->get_property = gst_mpd_segment_template_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_segment_template_get_xml_node;
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_SEGMENT_TEMPLATE_MEDIA, g_param_spec_string ("media",
+          "media", "media", NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_SEGMENT_TEMPLATE_INDEX, g_param_spec_string ("index",
+          "index", "index", NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_SEGMENT_TEMPLATE_INITIALIZATION,
+      g_param_spec_string ("initialization", "initialization", "initialization",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class,
+      PROP_MPD_SEGMENT_TEMPLATE_BITSTREAM_SWITCHING,
+      g_param_spec_string ("bitstream-switching", "bitstream switching",
+          "bitstream switching", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_segment_template_node_init (GstMPDSegmentTemplateNode * self)
+{
+  self->media = NULL;
+  self->index = NULL;
+  self->initialization = NULL;
+  self->bitstreamSwitching = NULL;
+}
+
+GstMPDSegmentTemplateNode *
+gst_mpd_segment_template_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SEGMENT_TEMPLATE_NODE, NULL);
+}
+
+void
+gst_mpd_segment_template_node_free (GstMPDSegmentTemplateNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.h
new file mode 100644 (file)
index 0000000..108db5c
--- /dev/null
@@ -0,0 +1,51 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSEGMENTTEMPLATENODE_H__
+#define __GSTMPDSEGMENTTEMPLATENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdmultsegmentbasenode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SEGMENT_TEMPLATE_NODE gst_mpd_segment_template_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSegmentTemplateNode2, gst_mpd_segment_template_node, GST, MPD_SEGMENT_TEMPLATE_NODE, GstMPDMultSegmentBaseNode)
+
+typedef GstMPDSegmentTemplateNode2 GstMPDSegmentTemplateNode;
+typedef GstMPDSegmentTemplateNode2Class GstMPDSegmentTemplateNodeClass;
+
+struct _GstMPDSegmentTemplateNode2
+{
+  GstMPDMultSegmentBaseNode     parent_instance;
+
+  gchar *media;
+  gchar *index;
+  gchar *initialization;
+  gchar *bitstreamSwitching;
+};
+
+GstMPDSegmentTemplateNode * gst_mpd_segment_template_node_new (void);
+void gst_mpd_segment_template_node_free (GstMPDSegmentTemplateNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSEGMENTTEMPLATENODE_H__ */
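For illustration (not part of the patch), the template strings map directly to the GObject properties installed above; the $Number$ pattern and file names are arbitrary examples:

GstMPDSegmentTemplateNode *tmpl = gst_mpd_segment_template_node_new ();

g_object_set (tmpl,
    "media", "seg-$Number$.m4s",
    "initialization", "init.mp4",
    NULL);

gst_mpd_segment_template_node_free (tmpl);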
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.c
new file mode 100644 (file)
index 0000000..01ae144
--- /dev/null
@@ -0,0 +1,111 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsegmenttimelinenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSegmentTimelineNode2, gst_mpd_segment_timeline_node,
+    GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_segment_timeline_node_finalize (GObject * object)
+{
+  GstMPDSegmentTimelineNode *self = GST_MPD_SEGMENT_TIMELINE_NODE (object);
+
+  g_queue_foreach (&self->S, (GFunc) gst_mpd_s_node_free, NULL);
+  g_queue_clear (&self->S);
+
+  G_OBJECT_CLASS (gst_mpd_segment_timeline_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_segment_timeline_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr segment_timeline_xml_node = NULL;
+  GstMPDSegmentTimelineNode *self = GST_MPD_SEGMENT_TIMELINE_NODE (node);
+
+  segment_timeline_xml_node = xmlNewNode (NULL, (xmlChar *) "SegmentTimeline");
+
+  g_queue_foreach (&self->S, (GFunc) gst_mpd_node_get_list_item,
+      segment_timeline_xml_node);
+
+  return segment_timeline_xml_node;
+}
+
+static void
+gst_mpd_segment_timeline_node_class_init (GstMPDSegmentTimelineNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_segment_timeline_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_segment_timeline_get_xml_node;
+}
+
+static void
+gst_mpd_segment_timeline_node_init (GstMPDSegmentTimelineNode * self)
+{
+  g_queue_init (&self->S);
+}
+
+GstMPDSegmentTimelineNode *
+gst_mpd_segment_timeline_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SEGMENT_TIMELINE_NODE, NULL);
+}
+
+void
+gst_mpd_segment_timeline_node_free (GstMPDSegmentTimelineNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+GstMPDSegmentTimelineNode *
+gst_mpd_segment_timeline_node_clone (GstMPDSegmentTimelineNode *
+    segment_timeline)
+{
+  GstMPDSegmentTimelineNode *clone = NULL;
+  GList *list;
+
+  if (segment_timeline) {
+    clone = gst_mpd_segment_timeline_node_new ();
+    for (list = g_queue_peek_head_link (&segment_timeline->S); list;
+        list = g_list_next (list)) {
+      GstMPDSNode *s_node;
+      s_node = (GstMPDSNode *) list->data;
+      if (s_node) {
+        g_queue_push_tail (&clone->S, gst_mpd_s_node_clone (s_node));
+      }
+    }
+  }
+
+  return clone;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.h
new file mode 100644 (file)
index 0000000..117bc00
--- /dev/null
@@ -0,0 +1,49 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSEGMENTTIMELINENODE_H__
+#define __GSTMPDSEGMENTTIMELINENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SEGMENT_TIMELINE_NODE gst_mpd_segment_timeline_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSegmentTimelineNode2, gst_mpd_segment_timeline_node, GST, MPD_SEGMENT_TIMELINE_NODE, GstMPDNode)
+
+typedef GstMPDSegmentTimelineNode2 GstMPDSegmentTimelineNode;
+typedef GstMPDSegmentTimelineNode2Class GstMPDSegmentTimelineNodeClass;
+
+struct _GstMPDSegmentTimelineNode2
+{
+  GstObject parent_instance;
+  /* list of S nodes */
+  GQueue S;
+};
+
+GstMPDSegmentTimelineNode * gst_mpd_segment_timeline_node_new (void);
+void gst_mpd_segment_timeline_node_free (GstMPDSegmentTimelineNode* self);
+
+GstMPDSegmentTimelineNode *gst_mpd_segment_timeline_node_clone (GstMPDSegmentTimelineNode * pointer);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSEGMENTTIMELINENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.c
new file mode 100644 (file)
index 0000000..fec6032
--- /dev/null
@@ -0,0 +1,166 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsegmenturlnode.h"
+#include "gstmpdparser.h"
+#include "gstmpdhelper.h"
+
+G_DEFINE_TYPE (GstMPDSegmentURLNode2, gst_mpd_segment_url_node,
+    GST_TYPE_MPD_NODE);
+
+enum
+{
+  PROP_MPD_SEGMENT_URL_0,
+  PROP_MPD_SEGMENT_URL_MEDIA,
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_segment_url_node_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMPDSegmentURLNode *self = GST_MPD_SEGMENT_URL_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_SEGMENT_URL_MEDIA:
+      g_free (self->media);
+      self->media = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_segment_url_node_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMPDSegmentURLNode *self = GST_MPD_SEGMENT_URL_NODE (object);
+  switch (prop_id) {
+    case PROP_MPD_SEGMENT_URL_MEDIA:
+      g_value_set_string (value, self->media);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_mpd_segment_url_node_finalize (GObject * object)
+{
+  GstMPDSegmentURLNode *self = GST_MPD_SEGMENT_URL_NODE (object);
+
+  if (self->media)
+    xmlFree (self->media);
+  g_slice_free (GstXMLRange, self->mediaRange);
+  if (self->index)
+    xmlFree (self->index);
+  g_slice_free (GstXMLRange, self->indexRange);
+
+  G_OBJECT_CLASS (gst_mpd_segment_url_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_segment_url_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr segment_url_xml_node = NULL;
+  GstMPDSegmentURLNode *self = GST_MPD_SEGMENT_URL_NODE (node);
+
+  segment_url_xml_node = xmlNewNode (NULL, (xmlChar *) "SegmentURL");
+
+  if (self->media)
+    gst_xml_helper_set_prop_string (segment_url_xml_node, "media", self->media);
+
+  if (self->mediaRange)
+    gst_xml_helper_set_prop_range (segment_url_xml_node, "mediaRange",
+        self->mediaRange);
+
+  if (self->index)
+    gst_xml_helper_set_prop_string (segment_url_xml_node, "index", self->index);
+
+  if (self->indexRange)
+    gst_xml_helper_set_prop_range (segment_url_xml_node, "indexRange",
+        self->indexRange);
+
+  return segment_url_xml_node;
+}
+
+static void
+gst_mpd_segment_url_node_class_init (GstMPDSegmentURLNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_segment_url_node_finalize;
+  object_class->set_property = gst_mpd_segment_url_node_set_property;
+  object_class->get_property = gst_mpd_segment_url_node_get_property;
+
+  m_klass->get_xml_node = gst_mpd_segment_url_get_xml_node;
+
+  g_object_class_install_property (object_class,
+      PROP_MPD_SEGMENT_URL_MEDIA, g_param_spec_string ("media",
+          "media", "media description", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_mpd_segment_url_node_init (GstMPDSegmentURLNode * self)
+{
+  self->media = NULL;
+  self->mediaRange = NULL;
+  self->index = NULL;
+  self->indexRange = NULL;
+}
+
+GstMPDSegmentURLNode *
+gst_mpd_segment_url_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SEGMENT_URL_NODE, NULL);
+}
+
+void
+gst_mpd_segment_url_node_free (GstMPDSegmentURLNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+GstMPDSegmentURLNode *
+gst_mpd_segment_url_node_clone (GstMPDSegmentURLNode * seg_url)
+{
+  GstMPDSegmentURLNode *clone = NULL;
+
+  if (seg_url) {
+    clone = gst_mpd_segment_url_node_new ();
+    clone->media = xmlMemStrdup (seg_url->media);
+    clone->mediaRange = gst_xml_helper_clone_range (seg_url->mediaRange);
+    clone->index = xmlMemStrdup (seg_url->index);
+    clone->indexRange = gst_xml_helper_clone_range (seg_url->indexRange);
+  }
+
+  return clone;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsegmenturlnode.h
new file mode 100644 (file)
index 0000000..ee5d9ab
--- /dev/null
@@ -0,0 +1,51 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSEGMENTURLNODE_H__
+#define __GSTMPDSEGMENTURLNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SEGMENT_URL_NODE gst_mpd_segment_url_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSegmentURLNode2, gst_mpd_segment_url_node, GST, MPD_SEGMENT_URL_NODE, GstMPDNode)
+
+typedef GstMPDSegmentURLNode2 GstMPDSegmentURLNode;
+typedef GstMPDSegmentURLNode2Class GstMPDSegmentURLNodeClass;
+
+struct _GstMPDSegmentURLNode2
+{
+  GstObject parent_instance;
+  gchar *media;
+  GstXMLRange *mediaRange;
+  gchar *index;
+  GstXMLRange *indexRange;
+};
+
+GstMPDSegmentURLNode * gst_mpd_segment_url_node_new (void);
+void gst_mpd_segment_url_node_free (GstMPDSegmentURLNode* self);
+
+GstMPDSegmentURLNode *gst_mpd_segment_url_node_clone (GstMPDSegmentURLNode * seg_url);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSEGMENTURLNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.c
new file mode 100644 (file)
index 0000000..3cd6213
--- /dev/null
@@ -0,0 +1,92 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSNode2, gst_mpd_s_node, GST_TYPE_MPD_NODE);
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_s_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr s_xml_node = NULL;
+  GstMPDSNode *self = GST_MPD_S_NODE (node);
+
+  s_xml_node = xmlNewNode (NULL, (xmlChar *) "S");
+
+  if (self->t)
+    gst_xml_helper_set_prop_uint64 (s_xml_node, "t", self->t);
+
+  if (self->d)
+    gst_xml_helper_set_prop_uint64 (s_xml_node, "d", self->d);
+
+  if (self->r)
+    gst_xml_helper_set_prop_int (s_xml_node, "r", self->r);
+
+  return s_xml_node;
+}
+
+static void
+gst_mpd_s_node_class_init (GstMPDSNodeClass * klass)
+{
+  GstMPDNodeClass *m_klass;
+
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  m_klass->get_xml_node = gst_mpd_s_get_xml_node;
+}
+
+static void
+gst_mpd_s_node_init (GstMPDSNode * self)
+{
+  self->t = 0;
+  self->d = 0;
+  self->r = 0;
+}
+
+GstMPDSNode *
+gst_mpd_s_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_S_NODE, NULL);
+}
+
+void
+gst_mpd_s_node_free (GstMPDSNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+GstMPDSNode *
+gst_mpd_s_node_clone (GstMPDSNode * s_node)
+{
+  GstMPDSNode *clone = NULL;
+
+  if (s_node) {
+    clone = gst_mpd_s_node_new ();
+    clone->t = s_node->t;
+    clone->d = s_node->d;
+    clone->r = s_node->r;
+  }
+
+  return clone;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsnode.h
new file mode 100644 (file)
index 0000000..eb6f968
--- /dev/null
@@ -0,0 +1,50 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSNODE_H__
+#define __GSTMPDSNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_S_NODE gst_mpd_s_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSNode2, gst_mpd_s_node, GST, MPD_S_NODE, GstMPDNode)
+
+typedef GstMPDSNode2 GstMPDSNode;
+typedef GstMPDSNode2Class GstMPDSNodeClass;
+
+struct _GstMPDSNode2
+{
+  GstObject parent_instance;
+  guint64 t;
+  guint64 d;
+  gint r;
+};
+
+GstMPDSNode * gst_mpd_s_node_new (void);
+void gst_mpd_s_node_free (GstMPDSNode* self);
+
+GstMPDSNode *gst_mpd_s_node_clone (GstMPDSNode * pointer);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSNODE_H__ */
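A short sketch (not part of the patch) of how a SegmentTimeline is assembled from S entries, mirroring the pattern used by gst_mpd_segment_timeline_node_clone() above; the timing values are arbitrary:

GstMPDSegmentTimelineNode *timeline = gst_mpd_segment_timeline_node_new ();
GstMPDSNode *s = gst_mpd_s_node_new ();

s->t = 0;        /* start time in timescale units */
s->d = 360000;   /* segment duration in timescale units */
s->r = 9;        /* entry repeats 9 additional times */

/* the timeline owns the S node; it is freed in the timeline's finalize */
g_queue_push_tail (&timeline->S, s);

GstMPDSegmentTimelineNode *copy =
    gst_mpd_segment_timeline_node_clone (timeline);

gst_mpd_segment_timeline_node_free (copy);
gst_mpd_segment_timeline_node_free (timeline);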
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.c
new file mode 100644 (file)
index 0000000..0fbf700
--- /dev/null
@@ -0,0 +1,109 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsubrepresentationnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSubRepresentationNode2, gst_mpd_sub_representation_node,
+    GST_TYPE_MPD_REPRESENTATION_BASE_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_sub_representation_node_finalize (GObject * object)
+{
+  GstMPDSubRepresentationNode *self = GST_MPD_SUB_REPRESENTATION_NODE (object);
+
+  if (self->dependencyLevel)
+    xmlFree (self->dependencyLevel);
+  g_strfreev (self->contentComponent);
+
+  G_OBJECT_CLASS (gst_mpd_sub_representation_node_parent_class)->finalize
+      (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_sub_representation_get_xml_node (GstMPDNode * node)
+{
+  gchar *value = NULL;
+  xmlNodePtr sub_representation_xml_node = NULL;
+  GstMPDSubRepresentationNode *self = GST_MPD_SUB_REPRESENTATION_NODE (node);
+
+  sub_representation_xml_node =
+      xmlNewNode (NULL, (xmlChar *) "SubRepresentation");
+
+  gst_xml_helper_set_prop_uint (sub_representation_xml_node, "level",
+      self->level);
+
+  gst_xml_helper_set_prop_uint_vector_type (sub_representation_xml_node,
+      "dependencyLevel", self->dependencyLevel, self->dependencyLevel_size);
+
+  gst_xml_helper_set_prop_uint (sub_representation_xml_node, "bandwidth",
+      self->bandwidth);
+
+  if (self->contentComponent) {
+    value = g_strjoinv (" ", self->contentComponent);
+    gst_xml_helper_set_prop_string (sub_representation_xml_node,
+        "contentComponent", value);
+    g_free (value);
+  }
+
+  return sub_representation_xml_node;
+}
+
+static void
+gst_mpd_sub_representation_node_class_init (GstMPDSubRepresentationNodeClass *
+    klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_sub_representation_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_sub_representation_get_xml_node;
+}
+
+static void
+gst_mpd_sub_representation_node_init (GstMPDSubRepresentationNode * self)
+{
+  self->level = 0;
+  self->dependencyLevel = NULL;
+  self->dependencyLevel_size = 0;
+  self->bandwidth = 0;
+  self->contentComponent = NULL;
+}
+
+GstMPDSubRepresentationNode *
+gst_mpd_sub_representation_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SUB_REPRESENTATION_NODE, NULL);
+}
+
+void
+gst_mpd_sub_representation_node_free (GstMPDSubRepresentationNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.h
new file mode 100644 (file)
index 0000000..25aedfd
--- /dev/null
@@ -0,0 +1,52 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSUBREPRESENTATIONNODE_H__
+#define __GSTMPDSUBREPRESENTATIONNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdhelper.h"
+#include "gstmpdrepresentationbasenode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SUB_REPRESENTATION_NODE gst_mpd_sub_representation_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSubRepresentationNode2, gst_mpd_sub_representation_node, GST, MPD_SUB_REPRESENTATION_NODE, GstMPDRepresentationBaseNode)
+
+typedef GstMPDSubRepresentationNode2 GstMPDSubRepresentationNode;
+typedef GstMPDSubRepresentationNode2Class GstMPDSubRepresentationNodeClass;
+
+struct _GstMPDSubRepresentationNode2
+{
+  GstMPDRepresentationBaseNode parent_instance;
+  /* RepresentationBase extension */
+  guint level;
+  guint *dependencyLevel;            /* UIntVectorType */
+  guint dependencyLevel_size;        /* size of "dependencyLevel" array */
+  guint bandwidth;
+  gchar **contentComponent;          /* StringVectorType */
+};
+
+GstMPDSubRepresentationNode * gst_mpd_sub_representation_node_new (void);
+void gst_mpd_sub_representation_node_free (GstMPDSubRepresentationNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSUBREPRESENTATIONNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.c
new file mode 100644 (file)
index 0000000..5130473
--- /dev/null
@@ -0,0 +1,88 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdsubsetnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDSubsetNode2, gst_mpd_subset_node, GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_subset_node_finalize (GObject * object)
+{
+  GstMPDSubsetNode *self = GST_MPD_SUBSET_NODE (object);
+
+  if (self->contains)
+    xmlFree (self->contains);
+
+  G_OBJECT_CLASS (gst_mpd_subset_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_subset_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr subset_xml_node = NULL;
+  GstMPDSubsetNode *self = GST_MPD_SUBSET_NODE (node);
+
+  subset_xml_node = xmlNewNode (NULL, (xmlChar *) "Subset");
+
+  if (self->contains)
+    gst_xml_helper_set_prop_uint_vector_type (subset_xml_node, "contains",
+        self->contains, self->contains_size);
+
+  return subset_xml_node;
+}
+
+static void
+gst_mpd_subset_node_class_init (GstMPDSubsetNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_subset_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_subset_get_xml_node;
+}
+
+static void
+gst_mpd_subset_node_init (GstMPDSubsetNode * self)
+{
+  self->contains_size = 0;
+  self->contains = NULL;
+}
+
+GstMPDSubsetNode *
+gst_mpd_subset_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_SUBSET_NODE, NULL);
+}
+
+void
+gst_mpd_subset_node_free (GstMPDSubsetNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdsubsetnode.h
new file mode 100644 (file)
index 0000000..f190f69
--- /dev/null
@@ -0,0 +1,52 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDSUBSETNODE_H__
+#define __GSTMPDSUBSETNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_SUBSET_NODE gst_mpd_subset_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDSubsetNode2, gst_mpd_subset_node, GST, MPD_SUBSET_NODE, GstMPDNode)
+
+typedef GstMPDSubsetNode2 GstMPDSubsetNode;
+typedef GstMPDSubsetNode2Class GstMPDSubsetNodeClass;
+
+struct _GstMPDSubsetNode2
+{
+  GstObject parent_instance;
+  guint *contains;                   /* UIntVectorType */
+  guint contains_size;               /* size of the "contains" array */
+};
+
+struct _GstMPDSubsetNodeClass {
+  GstMPDNodeClass parent_class;
+};
+
+
+GstMPDSubsetNode * gst_mpd_subset_node_new (void);
+void gst_mpd_subset_node_free (GstMPDSubsetNode* self);
+
+G_END_DECLS
+
+#endif /* __GSTMPDSUBSETNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.c
new file mode 100644 (file)
index 0000000..fc85d48
--- /dev/null
@@ -0,0 +1,110 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdurltypenode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDURLTypeNode2, gst_mpd_url_type_node, GST_TYPE_MPD_NODE);
+
+/* GObject VMethods */
+
+static void
+gst_mpd_url_type_node_finalize (GObject * object)
+{
+  GstMPDURLTypeNode *self = GST_MPD_URL_TYPE_NODE (object);
+
+  if (self->sourceURL)
+    xmlFree (self->sourceURL);
+  g_slice_free (GstXMLRange, self->range);
+  g_free (self->node_name);
+
+  G_OBJECT_CLASS (gst_mpd_url_type_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_url_type_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr url_type_xml_node = NULL;
+  GstMPDURLTypeNode *self = GST_MPD_URL_TYPE_NODE (node);
+
+  url_type_xml_node = xmlNewNode (NULL, (xmlChar *) self->node_name);
+
+  gst_xml_helper_set_prop_string (url_type_xml_node, "sourceURL",
+      self->sourceURL);
+  gst_xml_helper_set_prop_range (url_type_xml_node, "range", self->range);
+
+  return url_type_xml_node;
+}
+
+static void
+gst_mpd_url_type_node_class_init (GstMPDURLTypeNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_url_type_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_url_type_get_xml_node;
+}
+
+static void
+gst_mpd_url_type_node_init (GstMPDURLTypeNode * self)
+{
+  self->node_name = NULL;
+  self->sourceURL = NULL;
+  self->range = NULL;
+}
+
+GstMPDURLTypeNode *
+gst_mpd_url_type_node_new (const gchar * name)
+{
+  GstMPDURLTypeNode *self = g_object_new (GST_TYPE_MPD_URL_TYPE_NODE, NULL);
+  self->node_name = g_strdup (name);
+  return self;
+}
+
+void
+gst_mpd_url_type_node_free (GstMPDURLTypeNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+GstMPDURLTypeNode *
+gst_mpd_url_type_node_clone (GstMPDURLTypeNode * url)
+{
+
+  GstMPDURLTypeNode *clone = NULL;
+
+  if (url) {
+    clone = gst_mpd_url_type_node_new (url->node_name);
+    if (url->sourceURL) {
+      clone->sourceURL = xmlMemStrdup (url->sourceURL);
+    }
+    clone->range = gst_xml_helper_clone_range (url->range);
+  }
+
+  return clone;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdurltypenode.h
new file mode 100644 (file)
index 0000000..2e933b2
--- /dev/null
@@ -0,0 +1,50 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDURLTYPENODE_H__
+#define __GSTMPDURLTYPENODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_URL_TYPE_NODE gst_mpd_url_type_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDURLTypeNode2, gst_mpd_url_type_node, GST, MPD_URL_TYPE_NODE, GstMPDNode)
+
+typedef GstMPDURLTypeNode2 GstMPDURLTypeNode;
+typedef GstMPDURLTypeNode2Class GstMPDURLTypeNodeClass;
+
+struct _GstMPDURLTypeNode2
+{
+  GstObject     parent_instance;
+  gchar* node_name;
+  gchar *sourceURL;
+  GstXMLRange *range;
+};
+
+GstMPDURLTypeNode * gst_mpd_url_type_node_new (const gchar* name);
+void gst_mpd_url_type_node_free (GstMPDURLTypeNode* self);
+
+GstMPDURLTypeNode *gst_mpd_url_type_node_clone (GstMPDURLTypeNode * url);
+
+G_END_DECLS
+
+#endif /* __GSTMPDURLTYPENODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.c
new file mode 100644 (file)
index 0000000..f99ec13
--- /dev/null
@@ -0,0 +1,140 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+#include "gstmpdutctimingnode.h"
+#include "gstmpdparser.h"
+
+G_DEFINE_TYPE (GstMPDUTCTimingNode2, gst_mpd_utctiming_node, GST_TYPE_MPD_NODE);
+
+static const struct GstMPDUTCTimingMethod gst_mpd_utctiming_methods[] = {
+  {"urn:mpeg:dash:utc:ntp:2014", GST_MPD_UTCTIMING_TYPE_NTP},
+  {"urn:mpeg:dash:utc:sntp:2014", GST_MPD_UTCTIMING_TYPE_SNTP},
+  {"urn:mpeg:dash:utc:http-head:2014", GST_MPD_UTCTIMING_TYPE_HTTP_HEAD},
+  {"urn:mpeg:dash:utc:http-xsdate:2014", GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE},
+  {"urn:mpeg:dash:utc:http-iso:2014", GST_MPD_UTCTIMING_TYPE_HTTP_ISO},
+  {"urn:mpeg:dash:utc:http-ntp:2014", GST_MPD_UTCTIMING_TYPE_HTTP_NTP},
+  {"urn:mpeg:dash:utc:direct:2014", GST_MPD_UTCTIMING_TYPE_DIRECT},
+  /*
+   * Early working drafts used the :2012 namespace, and some DASH packagers
+   * still use it. To remain compatible with those packagers, we also accept
+   * the early draft scheme names.
+   */
+  {"urn:mpeg:dash:utc:ntp:2012", GST_MPD_UTCTIMING_TYPE_NTP},
+  {"urn:mpeg:dash:utc:sntp:2012", GST_MPD_UTCTIMING_TYPE_SNTP},
+  {"urn:mpeg:dash:utc:http-head:2012", GST_MPD_UTCTIMING_TYPE_HTTP_HEAD},
+  {"urn:mpeg:dash:utc:http-xsdate:2012", GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE},
+  {"urn:mpeg:dash:utc:http-iso:2012", GST_MPD_UTCTIMING_TYPE_HTTP_ISO},
+  {"urn:mpeg:dash:utc:http-ntp:2012", GST_MPD_UTCTIMING_TYPE_HTTP_NTP},
+  {"urn:mpeg:dash:utc:direct:2012", GST_MPD_UTCTIMING_TYPE_DIRECT},
+  {NULL, 0}
+};
+
+/* GObject VMethods */
+
+static void
+gst_mpd_utctiming_node_finalize (GObject * object)
+{
+  GstMPDUTCTimingNode *self = GST_MPD_UTCTIMING_NODE (object);
+
+  g_strfreev (self->urls);
+
+  G_OBJECT_CLASS (gst_mpd_utctiming_node_parent_class)->finalize (object);
+}
+
+/* Base class */
+
+static xmlNodePtr
+gst_mpd_utc_timing_get_xml_node (GstMPDNode * node)
+{
+  xmlNodePtr utc_timing_xml_node = NULL;
+  gchar *value = NULL;
+  GstMPDUTCTimingNode *self = GST_MPD_UTCTIMING_NODE (node);
+
+  utc_timing_xml_node = xmlNewNode (NULL, (xmlChar *) "UTCTiming");
+
+  if (self->method) {
+    gst_xml_helper_set_prop_string (utc_timing_xml_node, "schemeIdUri",
+        (gchar *) gst_mpd_utctiming_get_scheme_id_uri (self->method));
+  }
+  if (self->urls) {
+    value = g_strjoinv (" ", self->urls);
+    gst_xml_helper_set_prop_string (utc_timing_xml_node, "value", value);
+    g_free (value);
+  }
+
+  return utc_timing_xml_node;
+}
+
+static void
+gst_mpd_utctiming_node_class_init (GstMPDUTCTimingNodeClass * klass)
+{
+  GObjectClass *object_class;
+  GstMPDNodeClass *m_klass;
+
+  object_class = G_OBJECT_CLASS (klass);
+  m_klass = GST_MPD_NODE_CLASS (klass);
+
+  object_class->finalize = gst_mpd_utctiming_node_finalize;
+
+  m_klass->get_xml_node = gst_mpd_utc_timing_get_xml_node;
+}
+
+static void
+gst_mpd_utctiming_node_init (GstMPDUTCTimingNode * self)
+{
+  self->method = 0;
+  self->urls = NULL;
+}
+
+GstMPDUTCTimingNode *
+gst_mpd_utctiming_node_new (void)
+{
+  return g_object_new (GST_TYPE_MPD_UTCTIMING_NODE, NULL);
+}
+
+void
+gst_mpd_utctiming_node_free (GstMPDUTCTimingNode * self)
+{
+  if (self)
+    gst_object_unref (self);
+}
+
+const gchar *
+gst_mpd_utctiming_get_scheme_id_uri (GstMPDUTCTimingType type)
+{
+  int i;
+  for (i = 0; gst_mpd_utctiming_methods[i].name; ++i) {
+    if (type == gst_mpd_utctiming_methods[i].method)
+      return gst_mpd_utctiming_methods[i].name;
+  }
+  return NULL;
+}
+
+GstMPDUTCTimingType
+gst_mpd_utctiming_get_method (gchar * schemeIDURI)
+{
+  int i;
+  for (i = 0; gst_mpd_utctiming_methods[i].name; ++i) {
+    if (g_ascii_strncasecmp (gst_mpd_utctiming_methods[i].name,
+            schemeIDURI, strlen (gst_mpd_utctiming_methods[i].name)) == 0)
+      return gst_mpd_utctiming_methods[i].method;
+  }
+  return GST_MPD_UTCTIMING_TYPE_UNKNOWN;
+}
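+
+/*
+ * Illustrative example of the lookup helpers above (not used anywhere here):
+ *
+ *   gst_mpd_utctiming_get_method ("urn:mpeg:dash:utc:http-xsdate:2014")
+ *     returns GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE, and
+ *   gst_mpd_utctiming_get_scheme_id_uri (GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE)
+ *     returns "urn:mpeg:dash:utc:http-xsdate:2014", i.e. the first matching
+ *     table entry (the :2014 scheme), even if the MPD used a :2012 draft name.
+ */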
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstmpdutctimingnode.h
new file mode 100644 (file)
index 0000000..e90dfa9
--- /dev/null
@@ -0,0 +1,71 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GSTMPDUTCTIMINGNODE_H__
+#define __GSTMPDUTCTIMINGNODE_H__
+
+#include <gst/gst.h>
+#include "gstmpdnode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPD_UTCTIMING_NODE gst_mpd_utctiming_node_get_type ()
+G_DECLARE_FINAL_TYPE (GstMPDUTCTimingNode2, gst_mpd_utctiming_node, GST, MPD_UTCTIMING_NODE, GstMPDNode)
+
+typedef GstMPDUTCTimingNode2 GstMPDUTCTimingNode;
+typedef GstMPDUTCTimingNode2Class GstMPDUTCTimingNodeClass;
+
+typedef enum
+{
+  GST_MPD_UTCTIMING_TYPE_UNKNOWN     = 0x00,
+  GST_MPD_UTCTIMING_TYPE_NTP         = 0x01,
+  GST_MPD_UTCTIMING_TYPE_SNTP        = 0x02,
+  GST_MPD_UTCTIMING_TYPE_HTTP_HEAD   = 0x04,
+  GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE = 0x08,
+  GST_MPD_UTCTIMING_TYPE_HTTP_ISO    = 0x10,
+  GST_MPD_UTCTIMING_TYPE_HTTP_NTP    = 0x20,
+  GST_MPD_UTCTIMING_TYPE_DIRECT      = 0x40
+} GstMPDUTCTimingType;
+
+struct GstMPDUTCTimingMethod
+{
+  const gchar *name;
+  GstMPDUTCTimingType method;
+};
+
+struct _GstMPDUTCTimingNode2
+{
+  GstObject parent_instance;
+  GstMPDUTCTimingType method;
+  /* NULL terminated array of strings */
+  gchar **urls;
+  /* TODO add missing fields such as weight etc.*/
+};
+
+
+GstMPDUTCTimingNode * gst_mpd_utctiming_node_new (void);
+void gst_mpd_utctiming_node_free (GstMPDUTCTimingNode* self);
+
+const gchar* gst_mpd_utctiming_get_scheme_id_uri (GstMPDUTCTimingType type);
+GstMPDUTCTimingType gst_mpd_utctiming_get_method (gchar* schemeIDURI);
+
+G_END_DECLS
+
+#endif /* __GSTMPDUTCTIMINGNODE_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.c
new file mode 100644 (file)
index 0000000..fac3dae
--- /dev/null
@@ -0,0 +1,1259 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+ *
+ */
+
+#include "gstxmlhelper.h"
+
+#define GST_CAT_DEFAULT gst_dash_demux2_debug
+
+#define XML_HELPER_MINUTE_TO_SEC       60
+#define XML_HELPER_HOUR_TO_SEC         (60 * XML_HELPER_MINUTE_TO_SEC)
+#define XML_HELPER_DAY_TO_SEC          (24 * XML_HELPER_HOUR_TO_SEC)
+#define XML_HELPER_MONTH_TO_SEC        (30 * XML_HELPER_DAY_TO_SEC)
+#define XML_HELPER_YEAR_TO_SEC         (365 * XML_HELPER_DAY_TO_SEC)
+#define XML_HELPER_MS_TO_SEC(time)     ((time) / 1000)
+/* static methods */
+/* this function computes decimals * 10 ^ (3 - pos) */
+static guint
+_mpd_helper_convert_to_millisecs (guint decimals, gint pos)
+{
+  guint num = 1, den = 1;
+  gint i = 3 - pos;
+
+  while (i < 0) {
+    den *= 10;
+    i++;
+  }
+  while (i > 0) {
+    num *= 10;
+    i--;
+  }
+  /* if i == 0 we have exactly 3 decimals and nothing to do */
+  return decimals * num / den;
+}
+
+static gboolean
+_mpd_helper_accumulate (guint64 * v, guint64 mul, guint64 add)
+{
+  guint64 tmp;
+
+  if (*v > G_MAXUINT64 / mul)
+    return FALSE;
+  tmp = *v * mul;
+  if (tmp > G_MAXUINT64 - add)
+    return FALSE;
+  *v = tmp + add;
+  return TRUE;
+}
+
+/*
+  Duration Data Type
+
+  The duration data type is used to specify a time interval.
+
+  The time interval is specified in the following form "-PnYnMnDTnHnMnS" where:
+
+    * - indicates the negative sign (optional)
+    * P indicates the period (required)
+    * nY indicates the number of years
+    * nM indicates the number of months
+    * nD indicates the number of days
+    * T indicates the start of a time section (required if you are going to specify hours, minutes, or seconds)
+    * nH indicates the number of hours
+    * nM indicates the number of minutes
+    * nS indicates the number of seconds
+*/
+static gboolean
+_mpd_helper_parse_duration (const char *str, guint64 * value)
+{
+  gint ret, len, pos, posT;
+  gint years = -1, months = -1, days = -1, hours = -1, minutes = -1, seconds =
+      -1, decimals = -1, read;
+  gboolean have_ms = FALSE;
+  guint64 tmp_value;
+
+  len = strlen (str);
+  GST_TRACE ("duration: %s, len %d", str, len);
+  if (strspn (str, "PT0123456789., \tHMDSY") < len) {
+    GST_WARNING ("Invalid character found: '%s'", str);
+    goto error;
+  }
+  /* skip leading/trailing whitespace */
+  while (g_ascii_isspace (str[0])) {
+    str++;
+    len--;
+  }
+  while (len > 0 && g_ascii_isspace (str[len - 1]))
+    --len;
+
+  /* read "P" for period */
+  if (str[0] != 'P') {
+    GST_WARNING ("P not found at the beginning of the string!");
+    goto error;
+  }
+  str++;
+  len--;
+
+  /* read "T" for time (if present) */
+  posT = strcspn (str, "T");
+  len -= posT;
+  if (posT > 0) {
+    /* there is some room between P and T, so there must be a period section */
+    /* read years, months, days */
+    do {
+      GST_TRACE ("parsing substring %s", str);
+      pos = strcspn (str, "YMD");
+      ret = sscanf (str, "%u", &read);
+      if (ret != 1) {
+        GST_WARNING ("can not read integer value from string %s!", str);
+        goto error;
+      }
+      switch (str[pos]) {
+        case 'Y':
+          if (years != -1 || months != -1 || days != -1) {
+            GST_WARNING ("year, month or day was already set");
+            goto error;
+          }
+          years = read;
+          break;
+        case 'M':
+          if (months != -1 || days != -1) {
+            GST_WARNING ("month or day was already set");
+            goto error;
+          }
+          months = read;
+          if (months >= 12) {
+            GST_WARNING ("Month out of range");
+            goto error;
+          }
+          break;
+        case 'D':
+          if (days != -1) {
+            GST_WARNING ("day was already set");
+            goto error;
+          }
+          days = read;
+          if (days >= 31) {
+            GST_WARNING ("Day out of range");
+            goto error;
+          }
+          break;
+        default:
+          GST_WARNING ("unexpected char %c!", str[pos]);
+          goto error;
+          break;
+      }
+      GST_TRACE ("read number %u type %c", read, str[pos]);
+      str += (pos + 1);
+      posT -= (pos + 1);
+    } while (posT > 0);
+  }
+
+  if (years == -1)
+    years = 0;
+  if (months == -1)
+    months = 0;
+  if (days == -1)
+    days = 0;
+
+  GST_TRACE ("Y:M:D=%d:%d:%d", years, months, days);
+
+  /* read "T" for time (if present) */
+  /* here T is at pos == 0 */
+  str++;
+  len--;
+  pos = 0;
+  if (pos < len) {
+    /* T found, there is a time section */
+    /* read hours, minutes, seconds, hundredths of second */
+    do {
+      GST_TRACE ("parsing substring %s", str);
+      pos = strcspn (str, "HMS,.");
+      ret = sscanf (str, "%u", &read);
+      if (ret != 1) {
+        GST_WARNING ("can not read integer value from string %s!", str);
+        goto error;
+      }
+      switch (str[pos]) {
+        case 'H':
+          if (hours != -1 || minutes != -1 || seconds != -1) {
+            GST_WARNING ("hour, minute or second was already set");
+            goto error;
+          }
+          hours = read;
+          if (hours >= 24) {
+            GST_WARNING ("Hour out of range");
+            goto error;
+          }
+          break;
+        case 'M':
+          if (minutes != -1 || seconds != -1) {
+            GST_WARNING ("minute or second was already set");
+            goto error;
+          }
+          minutes = read;
+          if (minutes >= 60) {
+            GST_WARNING ("Minute out of range");
+            goto error;
+          }
+          break;
+        case 'S':
+          if (have_ms) {
+            /* we have read the decimal part of the seconds */
+            decimals = _mpd_helper_convert_to_millisecs (read, pos);
+            GST_TRACE ("decimal number %u (%d digits) -> %d ms", read, pos,
+                decimals);
+          } else {
+            if (seconds != -1) {
+              GST_WARNING ("second was already set");
+              goto error;
+            }
+            /* no decimals */
+            seconds = read;
+          }
+          break;
+        case '.':
+        case ',':
+          /* we have read the integer part of a decimal number in seconds */
+          if (seconds != -1) {
+            GST_WARNING ("second was already set");
+            goto error;
+          }
+          seconds = read;
+          have_ms = TRUE;
+          break;
+        default:
+          GST_WARNING ("unexpected char %c!", str[pos]);
+          goto error;
+          break;
+      }
+      GST_TRACE ("read number %u type %c", read, str[pos]);
+      str += pos + 1;
+      len -= (pos + 1);
+    } while (len > 0);
+  }
+
+  if (hours == -1)
+    hours = 0;
+  if (minutes == -1)
+    minutes = 0;
+  if (seconds == -1)
+    seconds = 0;
+  if (decimals == -1)
+    decimals = 0;
+  GST_TRACE ("H:M:S.MS=%d:%d:%d.%03d", hours, minutes, seconds, decimals);
+
+  tmp_value = 0;
+  if (!_mpd_helper_accumulate (&tmp_value, 1, years)
+      || !_mpd_helper_accumulate (&tmp_value, 365, months * 30)
+      || !_mpd_helper_accumulate (&tmp_value, 1, days)
+      || !_mpd_helper_accumulate (&tmp_value, 24, hours)
+      || !_mpd_helper_accumulate (&tmp_value, 60, minutes)
+      || !_mpd_helper_accumulate (&tmp_value, 60, seconds)
+      || !_mpd_helper_accumulate (&tmp_value, 1000, decimals))
+    goto error;
+
+  /* ensure it can be converted from milliseconds to nanoseconds */
+  if (tmp_value > G_MAXUINT64 / 1000000)
+    goto error;
+
+  *value = tmp_value;
+  return TRUE;
+
+error:
+  return FALSE;
+}
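+
+/*
+ * Worked example (illustrative only): "P1DT2H3M4.5S" parses as days=1,
+ * hours=2, minutes=3, seconds=4 and decimals=500, which accumulates to
+ * ((((1 * 24 + 2) * 60 + 3) * 60 + 4) * 1000 + 500) = 93784500 milliseconds.
+ */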
+
+static gboolean
+_mpd_helper_validate_no_whitespace (const char *s)
+{
+  return !strpbrk (s, "\r\n\t ");
+}
+
+/* API */
+
+GstXMLRange *
+gst_xml_helper_clone_range (GstXMLRange * range)
+{
+  GstXMLRange *clone = NULL;
+
+  if (range) {
+    clone = g_slice_new0 (GstXMLRange);
+    clone->first_byte_pos = range->first_byte_pos;
+    clone->last_byte_pos = range->last_byte_pos;
+  }
+
+  return clone;
+}
+
+GstXMLRatio *
+gst_xml_helper_clone_ratio (GstXMLRatio * ratio)
+{
+  GstXMLRatio *clone = NULL;
+
+  if (ratio) {
+    clone = g_slice_new0 (GstXMLRatio);
+    clone->num = ratio->num;
+    clone->den = ratio->den;
+  }
+
+  return clone;
+}
+
+GstXMLFrameRate *
+gst_xml_helper_clone_frame_rate (GstXMLFrameRate * frameRate)
+{
+  GstXMLFrameRate *clone = NULL;
+
+  if (frameRate) {
+    clone = g_slice_new0 (GstXMLFrameRate);
+    clone->num = frameRate->num;
+    clone->den = frameRate->den;
+  }
+
+  return clone;
+}
+
+/* XML property get method */
+gboolean
+gst_xml_helper_get_prop_validated_string (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value,
+    gboolean (*validate) (const char *))
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (validate && !(*validate) ((const char *) prop_string)) {
+      GST_WARNING ("Validation failure: %s", prop_string);
+      xmlFree (prop_string);
+      return FALSE;
+    }
+    *property_value = (gchar *) prop_string;
+    exists = TRUE;
+    GST_LOG (" - %s: %s", property_name, prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_ns_prop_string (xmlNode * a_node,
+    const gchar * ns_name, const gchar * property_name, gchar ** property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  prop_string =
+      xmlGetNsProp (a_node, (const xmlChar *) property_name,
+      (const xmlChar *) ns_name);
+  if (prop_string) {
+    *property_value = (gchar *) prop_string;
+    exists = TRUE;
+    GST_LOG (" - %s:%s: %s", ns_name, property_name, prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_string (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value)
+{
+  return gst_xml_helper_get_prop_validated_string (a_node, property_name,
+      property_value, NULL);
+}
+
+gboolean
+gst_xml_helper_get_prop_string_vector_type (xmlNode * a_node,
+    const gchar * property_name, gchar *** property_value)
+{
+  xmlChar *prop_string;
+  gchar **prop_string_vector = NULL;
+  guint i = 0;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    prop_string_vector = g_strsplit ((gchar *) prop_string, " ", -1);
+    if (prop_string_vector) {
+      exists = TRUE;
+      *property_value = prop_string_vector;
+      GST_LOG (" - %s:", property_name);
+      while (prop_string_vector[i]) {
+        GST_LOG ("    %s", prop_string_vector[i]);
+        i++;
+      }
+    } else {
+      GST_WARNING ("Scan of string vector property failed!");
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_signed_integer (xmlNode * a_node,
+    const gchar * property_name, gint default_val, gint * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  *property_value = default_val;
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (sscanf ((const gchar *) prop_string, "%d", property_value) == 1) {
+      exists = TRUE;
+      GST_LOG (" - %s: %d", property_name, *property_value);
+    } else {
+      GST_WARNING
+          ("failed to parse signed integer property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_unsigned_integer (xmlNode * a_node,
+    const gchar * property_name, guint default_val, guint * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  *property_value = default_val;
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (sscanf ((gchar *) prop_string, "%u", property_value) == 1 &&
+        strstr ((gchar *) prop_string, "-") == NULL) {
+      exists = TRUE;
+      GST_LOG (" - %s: %u", property_name, *property_value);
+    } else {
+      GST_WARNING
+          ("failed to parse unsigned integer property %s from xml string %s",
+          property_name, prop_string);
+      /* sscanf might have written to *property_value. Restore to default */
+      *property_value = default_val;
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_unsigned_integer_64 (xmlNode * a_node,
+    const gchar * property_name, guint64 default_val, guint64 * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  *property_value = default_val;
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (g_ascii_string_to_unsigned ((gchar *) prop_string, 10, 0, G_MAXUINT64,
+            property_value, NULL)) {
+      exists = TRUE;
+      GST_LOG (" - %s: %" G_GUINT64_FORMAT, property_name, *property_value);
+    } else {
+      GST_WARNING
+          ("failed to parse unsigned integer property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_uint_vector_type (xmlNode * a_node,
+    const gchar * property_name, guint ** property_value, guint * value_size)
+{
+  xmlChar *prop_string;
+  gchar **str_vector;
+  guint *prop_uint_vector = NULL, i;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    str_vector = g_strsplit ((gchar *) prop_string, " ", -1);
+    if (str_vector) {
+      *value_size = g_strv_length (str_vector);
+      prop_uint_vector = g_malloc (*value_size * sizeof (guint));
+      if (prop_uint_vector) {
+        exists = TRUE;
+        GST_LOG (" - %s:", property_name);
+        for (i = 0; i < *value_size; i++) {
+          if (sscanf ((gchar *) str_vector[i], "%u", &prop_uint_vector[i]) == 1
+              && strstr (str_vector[i], "-") == NULL) {
+            GST_LOG ("    %u", prop_uint_vector[i]);
+          } else {
+            GST_WARNING
+                ("failed to parse uint vector type property %s from xml string %s",
+                property_name, str_vector[i]);
+            /* there is no special value to put in prop_uint_vector[i] to
+             * signal it is invalid, so we just clean everything and return
+             * FALSE
+             */
+            g_free (prop_uint_vector);
+            prop_uint_vector = NULL;
+            exists = FALSE;
+            break;
+          }
+        }
+        *property_value = prop_uint_vector;
+      } else {
+        GST_WARNING ("Array allocation failed!");
+      }
+    } else {
+      GST_WARNING ("Scan of uint vector property failed!");
+    }
+    xmlFree (prop_string);
+    g_strfreev (str_vector);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_double (xmlNode * a_node,
+    const gchar * property_name, gdouble * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (sscanf ((gchar *) prop_string, "%lf", property_value) == 1) {
+      exists = TRUE;
+      GST_LOG (" - %s: %lf", property_name, *property_value);
+    } else {
+      GST_WARNING ("failed to parse double property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_boolean (xmlNode * a_node,
+    const gchar * property_name, gboolean default_val,
+    gboolean * property_value)
+{
+  xmlChar *prop_string;
+  gboolean exists = FALSE;
+
+  *property_value = default_val;
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    if (xmlStrcmp (prop_string, (xmlChar *) "false") == 0) {
+      exists = TRUE;
+      *property_value = FALSE;
+      GST_LOG (" - %s: false", property_name);
+    } else if (xmlStrcmp (prop_string, (xmlChar *) "true") == 0) {
+      exists = TRUE;
+      *property_value = TRUE;
+      GST_LOG (" - %s: true", property_name);
+    } else {
+      GST_WARNING ("failed to parse boolean property %s from xml string %s",
+          property_name, prop_string);
+    }
+    xmlFree (prop_string);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_prop_range (xmlNode * a_node,
+    const gchar * property_name, GstXMLRange ** property_value)
+{
+  xmlChar *prop_string;
+  guint64 first_byte_pos = 0, last_byte_pos = -1;
+  guint len, pos;
+  gchar *str;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    len = xmlStrlen (prop_string);
+    str = (gchar *) prop_string;
+    GST_TRACE ("range: %s, len %d", str, len);
+
+    /* find "-" */
+    pos = strcspn (str, "-");
+    if (pos >= len) {
+      GST_TRACE ("pos %d >= len %d", pos, len);
+      goto error;
+    }
+    if (pos == 0) {
+      GST_TRACE ("pos == 0, but first_byte_pos is not optional");
+      goto error;
+    }
+
+    /* read first_byte_pos */
+
+    /* replace str[pos] with '\0' since we only want to read the
+     * first_byte_pos, and g_ascii_string_to_unsigned requires the entire
+     * string to be a single number, which is exactly what we want */
+    str[pos] = '\0';
+    if (!g_ascii_string_to_unsigned (str, 10, 0, G_MAXUINT64, &first_byte_pos,
+            NULL)) {
+      /* restore the '-' sign */
+      str[pos] = '-';
+      goto error;
+    }
+    /* restore the '-' sign */
+    str[pos] = '-';
+
+    /* read last_byte_pos, which is optional */
+    if (pos < (len - 1) && !g_ascii_string_to_unsigned (str + pos + 1, 10, 0,
+            G_MAXUINT64, &last_byte_pos, NULL)) {
+      goto error;
+    }
+    /* malloc return data structure */
+    *property_value = g_slice_new0 (GstXMLRange);
+    exists = TRUE;
+    (*property_value)->first_byte_pos = first_byte_pos;
+    (*property_value)->last_byte_pos = last_byte_pos;
+    xmlFree (prop_string);
+    GST_LOG (" - %s: %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT,
+        property_name, first_byte_pos, last_byte_pos);
+  }
+
+  return exists;
+
+error:
+  GST_WARNING ("failed to parse property %s from xml string %s", property_name,
+      prop_string);
+  xmlFree (prop_string);
+  return FALSE;
+}
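+
+/*
+ * Illustrative inputs for the range parser above: "100-499" yields
+ * first_byte_pos=100 and last_byte_pos=499, while "100-" leaves last_byte_pos
+ * at its open-ended default of G_MAXUINT64. A leading "-" (e.g. "-499") is
+ * rejected because first_byte_pos is not optional.
+ */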
+
+gboolean
+gst_xml_helper_get_prop_ratio (xmlNode * a_node,
+    const gchar * property_name, GstXMLRatio ** property_value)
+{
+  xmlChar *prop_string;
+  guint num = 0, den = 1;
+  guint len, pos;
+  gchar *str;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    len = xmlStrlen (prop_string);
+    str = (gchar *) prop_string;
+    GST_TRACE ("ratio: %s, len %d", str, len);
+
+    /* read ":" */
+    pos = strcspn (str, ":");
+    if (pos >= len) {
+      GST_TRACE ("pos %d >= len %d", pos, len);
+      goto error;
+    }
+    /* search for negative sign */
+    if (strstr (str, "-") != NULL) {
+      goto error;
+    }
+    /* read num */
+    if (pos != 0) {
+      if (sscanf (str, "%u", &num) != 1) {
+        goto error;
+      }
+    }
+    /* read den */
+    if (pos < (len - 1)) {
+      if (sscanf (str + pos + 1, "%u", &den) != 1) {
+        goto error;
+      }
+    }
+    /* malloc return data structure */
+    *property_value = g_slice_new0 (GstXMLRatio);
+    exists = TRUE;
+    (*property_value)->num = num;
+    (*property_value)->den = den;
+    xmlFree (prop_string);
+    GST_LOG (" - %s: %u:%u", property_name, num, den);
+  }
+
+  return exists;
+
+error:
+  GST_WARNING ("failed to parse property %s from xml string %s", property_name,
+      prop_string);
+  xmlFree (prop_string);
+  return FALSE;
+}
+
+gboolean
+gst_xml_helper_get_prop_framerate (xmlNode * a_node,
+    const gchar * property_name, GstXMLFrameRate ** property_value)
+{
+  xmlChar *prop_string;
+  guint num = 0, den = 1;
+  guint len, pos;
+  gchar *str;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    len = xmlStrlen (prop_string);
+    str = (gchar *) prop_string;
+    GST_TRACE ("framerate: %s, len %d", str, len);
+
+    /* search for negative sign */
+    if (strstr (str, "-") != NULL) {
+      goto error;
+    }
+
+    /* read "/" if available */
+    pos = strcspn (str, "/");
+    /* read num */
+    if (pos != 0) {
+      if (sscanf (str, "%u", &num) != 1) {
+        goto error;
+      }
+    }
+    /* read den (if available) */
+    if (pos < (len - 1)) {
+      if (sscanf (str + pos + 1, "%u", &den) != 1) {
+        goto error;
+      }
+    }
+    /* alloc return data structure */
+    *property_value = g_slice_new0 (GstXMLFrameRate);
+    exists = TRUE;
+    (*property_value)->num = num;
+    (*property_value)->den = den;
+    xmlFree (prop_string);
+    if (den == 1)
+      GST_LOG (" - %s: %u", property_name, num);
+    else
+      GST_LOG (" - %s: %u/%u", property_name, num, den);
+  }
+
+  return exists;
+
+error:
+  GST_WARNING ("failed to parse property %s from xml string %s", property_name,
+      prop_string);
+  xmlFree (prop_string);
+  return FALSE;
+}
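+
+/*
+ * Illustrative inputs for the frame rate parser above: "30000/1001" yields
+ * num=30000 den=1001, while a plain "25" yields num=25 with den left at its
+ * default of 1.
+ */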
+
+gboolean
+gst_xml_helper_get_prop_cond_uint (xmlNode * a_node,
+    const gchar * property_name, GstXMLConditionalUintType ** property_value)
+{
+  xmlChar *prop_string;
+  gchar *str;
+  gboolean flag;
+  guint val;
+  gboolean exists = FALSE;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    str = (gchar *) prop_string;
+    GST_TRACE ("conditional uint: %s", str);
+
+    if (strcmp (str, "false") == 0) {
+      flag = FALSE;
+      val = 0;
+    } else if (strcmp (str, "true") == 0) {
+      flag = TRUE;
+      val = 0;
+    } else {
+      flag = TRUE;
+      if (sscanf (str, "%u", &val) != 1 || strstr (str, "-") != NULL)
+        goto error;
+    }
+
+    /* alloc return data structure */
+    *property_value = g_slice_new0 (GstXMLConditionalUintType);
+    exists = TRUE;
+    (*property_value)->flag = flag;
+    (*property_value)->value = val;
+    xmlFree (prop_string);
+    GST_LOG (" - %s: flag=%s val=%u", property_name, flag ? "true" : "false",
+        val);
+  }
+
+  return exists;
+
+error:
+  GST_WARNING ("failed to parse property %s from xml string %s", property_name,
+      prop_string);
+  xmlFree (prop_string);
+  return FALSE;
+}
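+
+/*
+ * Illustrative inputs for the conditional uint parser above: "false" yields
+ * flag=FALSE value=0, "true" yields flag=TRUE value=0, and a plain number
+ * such as "1500" yields flag=TRUE value=1500.
+ */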
+
+gboolean
+gst_xml_helper_get_prop_dateTime (xmlNode * a_node,
+    const gchar * property_name, GstDateTime ** property_value)
+{
+  xmlChar *prop_string;
+  gchar *str;
+  gint ret, pos;
+  gint year, month, day, hour, minute;
+  gdouble second;
+  gboolean exists = FALSE;
+  gfloat tzoffset = 0.0;
+  gint gmt_offset_hour = -99, gmt_offset_min = -99;
+
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    str = (gchar *) prop_string;
+    GST_TRACE ("dateTime: %s, len %d", str, xmlStrlen (prop_string));
+    /* parse year */
+    ret = sscanf (str, "%d", &year);
+    if (ret != 1 || year <= 0)
+      goto error;
+    pos = strcspn (str, "-");
+    str += (pos + 1);
+    GST_TRACE (" - year %d", year);
+    /* parse month */
+    ret = sscanf (str, "%d", &month);
+    if (ret != 1 || month <= 0)
+      goto error;
+    pos = strcspn (str, "-");
+    str += (pos + 1);
+    GST_TRACE (" - month %d", month);
+    /* parse day */
+    ret = sscanf (str, "%d", &day);
+    if (ret != 1 || day <= 0)
+      goto error;
+    pos = strcspn (str, "T");
+    str += (pos + 1);
+    GST_TRACE (" - day %d", day);
+    /* parse hour */
+    ret = sscanf (str, "%d", &hour);
+    if (ret != 1 || hour < 0)
+      goto error;
+    pos = strcspn (str, ":");
+    str += (pos + 1);
+    GST_TRACE (" - hour %d", hour);
+    /* parse minute */
+    ret = sscanf (str, "%d", &minute);
+    if (ret != 1 || minute < 0)
+      goto error;
+    pos = strcspn (str, ":");
+    str += (pos + 1);
+    GST_TRACE (" - minute %d", minute);
+    /* parse second */
+    ret = sscanf (str, "%lf", &second);
+    if (ret != 1 || second < 0)
+      goto error;
+    GST_TRACE (" - second %lf", second);
+
+    GST_LOG (" - %s: %4d/%02d/%02d %02d:%02d:%09.6lf", property_name,
+        year, month, day, hour, minute, second);
+
+    if (strrchr (str, '+') || strrchr (str, '-')) {
+      /* reuse some code from gst-plugins-base/gst-libs/gst/tag/gstxmptag.c */
+      gint gmt_offset = -1;
+      gchar *plus_pos = NULL;
+      gchar *neg_pos = NULL;
+      gchar *pos = NULL;
+
+      GST_LOG ("Checking for timezone information");
+
+      /* check if there is timezone info */
+      plus_pos = strrchr (str, '+');
+      neg_pos = strrchr (str, '-');
+      if (plus_pos)
+        pos = plus_pos + 1;
+      else if (neg_pos)
+        pos = neg_pos + 1;
+
+      if (pos && strlen (pos) >= 3) {
+        gint ret_tz;
+        if (pos[2] == ':')
+          ret_tz = sscanf (pos, "%d:%d", &gmt_offset_hour, &gmt_offset_min);
+        else
+          ret_tz = sscanf (pos, "%02d%02d", &gmt_offset_hour, &gmt_offset_min);
+
+        GST_DEBUG ("Parsing timezone: %s", pos);
+
+        if (ret_tz == 2) {
+          if (neg_pos != NULL && neg_pos + 1 == pos) {
+            gmt_offset_hour *= -1;
+            gmt_offset_min *= -1;
+          }
+          gmt_offset = gmt_offset_hour * 60 + gmt_offset_min;
+
+          tzoffset = gmt_offset / 60.0;
+
+          GST_LOG ("Timezone offset: %f (%d minutes)", tzoffset, gmt_offset);
+        } else
+          GST_WARNING ("Failed to parse timezone information");
+      }
+    }
+
+    exists = TRUE;
+    *property_value =
+        gst_date_time_new (tzoffset, year, month, day, hour, minute, second);
+    xmlFree (prop_string);
+  }
+
+  return exists;
+
+error:
+  GST_WARNING ("failed to parse property %s from xml string %s", property_name,
+      prop_string);
+  xmlFree (prop_string);
+  return FALSE;
+}
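+
+/*
+ * Illustrative input for the dateTime parser above:
+ * "2019-05-24T09:30:15.5+02:00" yields 2019/05/24 09:30:15.5 with a timezone
+ * offset of +2.0 hours, while a value without a trailing "+hh:mm"/"-hh:mm"
+ * part keeps the default offset of 0.0 (UTC).
+ */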
+
+gboolean
+gst_xml_helper_get_prop_duration (xmlNode * a_node,
+    const gchar * property_name, guint64 default_value,
+    guint64 * property_value)
+{
+  xmlChar *prop_string;
+  gchar *str;
+  gboolean exists = FALSE;
+
+  *property_value = default_value;
+  prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
+  if (prop_string) {
+    str = (gchar *) prop_string;
+    if (!_mpd_helper_parse_duration (str, property_value))
+      goto error;
+    GST_LOG (" - %s: %" G_GUINT64_FORMAT, property_name, *property_value);
+    xmlFree (prop_string);
+    exists = TRUE;
+  }
+  return exists;
+
+error:
+  xmlFree (prop_string);
+  return FALSE;
+}
+
+gboolean
+gst_xml_helper_get_node_content (xmlNode * a_node, gchar ** content)
+{
+  xmlChar *node_content = NULL;
+  gboolean exists = FALSE;
+
+  node_content = xmlNodeGetContent (a_node);
+  if (node_content) {
+    exists = TRUE;
+    *content = (gchar *) node_content;
+    GST_LOG (" - %s: %s", a_node->name, *content);
+  }
+
+  return exists;
+}
+
+gboolean
+gst_xml_helper_get_node_as_string (xmlNode * a_node, gchar ** content)
+{
+  gboolean exists = FALSE;
+  const char *txt_encoding;
+  xmlOutputBufferPtr out_buf;
+
+  txt_encoding = (const char *) a_node->doc->encoding;
+  out_buf = xmlAllocOutputBuffer (NULL);
+  g_assert (out_buf != NULL);
+  xmlNodeDumpOutput (out_buf, a_node->doc, a_node, 0, 0, txt_encoding);
+  (void) xmlOutputBufferFlush (out_buf);
+#ifdef LIBXML2_NEW_BUFFER
+  if (xmlOutputBufferGetSize (out_buf) > 0) {
+    *content =
+        (gchar *) xmlStrndup (xmlOutputBufferGetContent (out_buf),
+        xmlOutputBufferGetSize (out_buf));
+    exists = TRUE;
+  }
+#else
+  if (out_buf->conv && out_buf->conv->use > 0) {
+    *content =
+        (gchar *) xmlStrndup (out_buf->conv->content, out_buf->conv->use);
+    exists = TRUE;
+  } else if (out_buf->buffer && out_buf->buffer->use > 0) {
+    *content =
+        (gchar *) xmlStrndup (out_buf->buffer->content, out_buf->buffer->use);
+    exists = TRUE;
+  }
+#endif // LIBXML2_NEW_BUFFER
+  (void) xmlOutputBufferClose (out_buf);
+
+  if (exists) {
+    GST_LOG (" - %s: %s", a_node->name, *content);
+  }
+  return exists;
+}
+
+gchar *
+gst_xml_helper_get_node_namespace (xmlNode * a_node, const gchar * prefix)
+{
+  xmlNs *curr_ns;
+  gchar *namespace = NULL;
+
+  if (prefix == NULL) {
+    /* return the default namespace */
+    if (a_node->ns) {
+      namespace = xmlMemStrdup ((const gchar *) a_node->ns->href);
+      if (namespace) {
+        GST_LOG (" - default namespace: %s", namespace);
+      }
+    }
+  } else {
+    /* look for the specified prefix in the namespace list */
+    for (curr_ns = a_node->ns; curr_ns; curr_ns = curr_ns->next) {
+      if (xmlStrcmp (curr_ns->prefix, (xmlChar *) prefix) == 0) {
+        namespace = xmlMemStrdup ((const gchar *) curr_ns->href);
+        if (namespace) {
+          GST_LOG (" - %s namespace: %s", curr_ns->prefix, curr_ns->href);
+        }
+      }
+    }
+  }
+
+  return namespace;
+}
+
+gboolean
+gst_xml_helper_get_prop_string_stripped (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value)
+{
+  gboolean ret;
+  ret = gst_xml_helper_get_prop_string (a_node, property_name, property_value);
+  if (ret)
+    *property_value = g_strstrip (*property_value);
+  return ret;
+}
+
+gboolean
+gst_xml_helper_get_prop_string_no_whitespace (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value)
+{
+  return gst_xml_helper_get_prop_validated_string (a_node, property_name,
+      property_value, _mpd_helper_validate_no_whitespace);
+}
+
+
+/* XML property set method */
+
+void
+gst_xml_helper_set_prop_string (xmlNodePtr node, const gchar * name,
+    gchar * value)
+{
+  if (value)
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) value);
+}
+
+void
+gst_xml_helper_set_prop_boolean (xmlNodePtr node, const gchar * name,
+    gboolean value)
+{
+  if (value)
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) "true");
+  else
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) "false");
+}
+
+void
+gst_xml_helper_set_prop_int (xmlNodePtr node, const gchar * name, gint value)
+{
+  gchar *text;
+  text = g_strdup_printf ("%d", value);
+  xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+  g_free (text);
+}
+
+void
+gst_xml_helper_set_prop_uint (xmlNodePtr node, const gchar * name, guint value)
+{
+  gchar *text;
+  text = g_strdup_printf ("%u", value);
+  xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+  g_free (text);
+}
+
+void
+gst_xml_helper_set_prop_int64 (xmlNodePtr node, const gchar * name,
+    gint64 value)
+{
+  gchar *text;
+  text = g_strdup_printf ("%" G_GINT64_FORMAT, value);
+  xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+  g_free (text);
+}
+
+void
+gst_xml_helper_set_prop_uint64 (xmlNodePtr node, const gchar * name,
+    guint64 value)
+{
+  gchar *text;
+  text = g_strdup_printf ("%" G_GUINT64_FORMAT, value);
+  xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+  g_free (text);
+}
+
+void
+gst_xml_helper_set_prop_double (xmlNodePtr node, const gchar * name,
+    gdouble value)
+{
+  gchar *text;
+  text = g_strdup_printf ("%lf", value);
+  xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+  g_free (text);
+}
+
+void
+gst_xml_helper_set_prop_uint_vector_type (xmlNode * node, const gchar * name,
+    guint * value, guint value_size)
+{
+  int i;
+  gchar *text = NULL;
+  gchar *prev;
+  gchar *temp;
+
+  for (i = 0; i < value_size; i++) {
+    temp = g_strdup_printf ("%u", value[i]);
+    prev = text;
+    /* append the new value to the space-separated list built so far */
+    text = prev ? g_strjoin (" ", prev, temp, NULL) : g_strdup (temp);
+    g_free (prev);
+    g_free (temp);
+  }
+
+  if (text) {
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+void
+gst_xml_helper_set_prop_date_time (xmlNodePtr node, const gchar * name,
+    GstDateTime * value)
+{
+  gchar *text;
+  if (value) {
+    text = gst_date_time_to_iso8601_string (value);
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+void
+gst_xml_helper_set_prop_duration (xmlNode * node, const gchar * name,
+    guint64 value)
+{
+  gchar *text;
+  gint years, months, days, hours, minutes, seconds, milliseconds;
+  if (value) {
+    years = (gint) (XML_HELPER_MS_TO_SEC (value) / (XML_HELPER_YEAR_TO_SEC));
+    months =
+        (gint) ((XML_HELPER_MS_TO_SEC (value) % XML_HELPER_YEAR_TO_SEC) /
+        XML_HELPER_MONTH_TO_SEC);
+    days =
+        (gint) ((XML_HELPER_MS_TO_SEC (value) % XML_HELPER_MONTH_TO_SEC) /
+        XML_HELPER_DAY_TO_SEC);
+    hours =
+        (gint) ((XML_HELPER_MS_TO_SEC (value) % XML_HELPER_DAY_TO_SEC) /
+        XML_HELPER_HOUR_TO_SEC);
+    minutes =
+        (gint) ((XML_HELPER_MS_TO_SEC (value) % XML_HELPER_HOUR_TO_SEC) /
+        XML_HELPER_MINUTE_TO_SEC);
+    seconds = (gint) (XML_HELPER_MS_TO_SEC (value) % XML_HELPER_MINUTE_TO_SEC);
+    milliseconds = value % 1000;
+
+    /* zero-pad the milliseconds so e.g. 5 ms serializes as ".005S" */
+    text =
+        g_strdup_printf ("P%dY%dM%dDT%dH%dM%d.%03dS", years, months, days,
+        hours, minutes, seconds, milliseconds);
+    GST_LOG ("duration %" G_GUINT64_FORMAT " -> %s", value, text);
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
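+
+/*
+ * Illustrative example for the duration serializer above: a value of
+ * 93784500 (milliseconds) is written out as "P0Y0M1DT2H3M4.500S", the
+ * counterpart of the _mpd_helper_parse_duration() example earlier in this
+ * file.
+ */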
+
+void
+gst_xml_helper_set_prop_ratio (xmlNodePtr node, const gchar * name,
+    GstXMLRatio * value)
+{
+  gchar *text;
+  if (value) {
+    text = g_strdup_printf ("%d:%d", value->num, value->den);
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+
+void
+gst_xml_helper_set_prop_framerate (xmlNodePtr node, const gchar * name,
+    GstXMLFrameRate * value)
+{
+  gchar *text;
+  if (value) {
+    text = g_strdup_printf ("%d/%d", value->num, value->den);
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+void
+gst_xml_helper_set_prop_range (xmlNodePtr node, const gchar * name,
+    GstXMLRange * value)
+{
+  gchar *text;
+  if (value) {
+    text =
+        g_strdup_printf ("%" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT,
+        value->first_byte_pos, value->last_byte_pos);
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+void
+gst_xml_helper_set_prop_cond_uint (xmlNodePtr node, const gchar * name,
+    GstXMLConditionalUintType * cond)
+{
+  gchar *text;
+  if (cond) {
+    if (cond->flag) {
+      if (cond->value)
+        text = g_strdup_printf ("%u", cond->value);
+      else
+        text = g_strdup ("true");
+    } else {
+      text = g_strdup ("false");
+    }
+
+    xmlSetProp (node, (xmlChar *) name, (xmlChar *) text);
+    g_free (text);
+  }
+}
+
+void
+gst_xml_helper_set_content (xmlNodePtr node, gchar * content)
+{
+  if (content)
+    xmlNodeSetContent (node, (xmlChar *) content);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/dash/gstxmlhelper.h
new file mode 100644 (file)
index 0000000..6537cf0
--- /dev/null
@@ -0,0 +1,133 @@
+/* GStreamer
+ *
+ * Copyright (C) 2019 Collabora Ltd.
+ *   Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GST_XMLHELPER_H__
+#define __GST_XMLHELPER_H__
+
+#include <libxml/parser.h>
+#include <libxml/tree.h>
+#include <gst/gst.h>
+#include "gstdash_debug.h"
+
+G_BEGIN_DECLS
+
+typedef struct _GstXMLRange                  GstXMLRange;
+typedef struct _GstXMLRatio                  GstXMLRatio;
+typedef struct _GstXMLFrameRate              GstXMLFrameRate;
+typedef struct _GstXMLConditionalUintType    GstXMLConditionalUintType;
+
+struct _GstXMLRange
+{
+  guint64 first_byte_pos;
+  guint64 last_byte_pos;
+};
+
+struct _GstXMLRatio
+{
+  guint num;
+  guint den;
+};
+
+struct _GstXMLFrameRate
+{
+  guint num;
+  guint den;
+};
+
+struct _GstXMLConditionalUintType
+{
+  gboolean flag;
+  guint value;
+};
+
+GstXMLRange *gst_xml_helper_clone_range (GstXMLRange * range);
+GstXMLRatio *gst_xml_helper_clone_ratio (GstXMLRatio * ratio);
+GstXMLFrameRate *gst_xml_helper_clone_frame_rate (GstXMLFrameRate * frameRate);
+
+/* XML property get method */
+gboolean gst_xml_helper_get_prop_validated_string (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value,
+    gboolean (*validator) (const char *));
+gboolean gst_xml_helper_get_prop_string (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value);
+gboolean gst_xml_helper_get_prop_string_stripped (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value);
+gboolean gst_xml_helper_get_ns_prop_string (xmlNode * a_node,
+    const gchar * ns_name, const gchar * property_name,
+    gchar ** property_value);
+gboolean gst_xml_helper_get_prop_string_vector_type (xmlNode * a_node,
+    const gchar * property_name, gchar *** property_value);
+gboolean gst_xml_helper_get_prop_signed_integer (xmlNode * a_node,
+    const gchar * property_name, gint default_val, gint * property_value);
+gboolean gst_xml_helper_get_prop_unsigned_integer (xmlNode * a_node,
+    const gchar * property_name, guint default_val, guint * property_value);
+gboolean gst_xml_helper_get_prop_unsigned_integer_64 (xmlNode *
+    a_node, const gchar * property_name, guint64 default_val,
+    guint64 * property_value);
+gboolean gst_xml_helper_get_prop_uint_vector_type (xmlNode * a_node,
+    const gchar * property_name, guint ** property_value, guint * value_size);
+gboolean gst_xml_helper_get_prop_double (xmlNode * a_node,
+    const gchar * property_name, gdouble * property_value);
+gboolean gst_xml_helper_get_prop_boolean (xmlNode * a_node,
+    const gchar * property_name, gboolean default_val,
+    gboolean * property_value);
+gboolean gst_xml_helper_get_prop_range (xmlNode * a_node,
+    const gchar * property_name, GstXMLRange ** property_value);
+gboolean gst_xml_helper_get_prop_ratio (xmlNode * a_node,
+    const gchar * property_name, GstXMLRatio ** property_value);
+gboolean gst_xml_helper_get_prop_framerate (xmlNode * a_node,
+    const gchar * property_name, GstXMLFrameRate ** property_value);
+gboolean gst_xml_helper_get_prop_cond_uint (xmlNode * a_node,
+    const gchar * property_name, GstXMLConditionalUintType ** property_value);
+gboolean gst_xml_helper_get_prop_dateTime (xmlNode * a_node,
+    const gchar * property_name, GstDateTime ** property_value);
+gboolean gst_xml_helper_get_prop_duration (xmlNode * a_node,
+    const gchar * property_name, guint64 default_value,
+    guint64 * property_value);
+gboolean gst_xml_helper_get_prop_string_no_whitespace (xmlNode * a_node,
+    const gchar * property_name, gchar ** property_value);
+
+/* XML node get method */
+gboolean gst_xml_helper_get_node_content (xmlNode * a_node,
+    gchar ** content);
+gchar *gst_xml_helper_get_node_namespace (xmlNode * a_node,
+    const gchar * prefix);
+gboolean gst_xml_helper_get_node_as_string (xmlNode * a_node,
+    gchar ** content);
+
+/* XML property set method */
+void gst_xml_helper_set_prop_string (xmlNodePtr node, const gchar * name, gchar* value);
+void gst_xml_helper_set_prop_boolean (xmlNodePtr node, const gchar * name, gboolean value);
+void gst_xml_helper_set_prop_int (xmlNodePtr root, const gchar * name, gint value);
+void gst_xml_helper_set_prop_uint (xmlNodePtr root, const gchar * name, guint value);
+void gst_xml_helper_set_prop_int64 (xmlNodePtr node, const gchar * name, gint64 value);
+void gst_xml_helper_set_prop_uint64 (xmlNodePtr node, const gchar * name, guint64 value);
+void gst_xml_helper_set_prop_uint_vector_type (xmlNode * a_node,  const gchar * name, guint * value, guint value_size);
+void gst_xml_helper_set_prop_double (xmlNodePtr node, const gchar * name, gdouble value);
+void gst_xml_helper_set_prop_date_time (xmlNodePtr node, const gchar * name, GstDateTime* value);
+void gst_xml_helper_set_prop_duration (xmlNode * node, const gchar * name, guint64 value);
+void gst_xml_helper_set_prop_ratio (xmlNodePtr node, const gchar * name, GstXMLRatio* value);
+void gst_xml_helper_set_prop_framerate (xmlNodePtr node, const gchar * name, GstXMLFrameRate* value);
+void gst_xml_helper_set_prop_range (xmlNodePtr node, const gchar * name, GstXMLRange* value);
+void gst_xml_helper_set_prop_cond_uint (xmlNode * a_node, const gchar * property_name, GstXMLConditionalUintType * cond);
+void gst_xml_helper_set_content (xmlNodePtr node, gchar * content);
+
+G_END_DECLS
+#endif /* __GST_XMLHELPER_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.c
new file mode 100644 (file)
index 0000000..3541364
--- /dev/null
@@ -0,0 +1,1004 @@
+/* GStreamer
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * downloadhelper.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "downloadhelper.h"
+#include "../soup/gstsouploader.h"
+
+GST_DEBUG_CATEGORY_EXTERN (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+#define CHUNK_BUFFER_SIZE 32768
+
+typedef struct DownloadHelperTransfer DownloadHelperTransfer;
+
+struct DownloadHelper
+{
+  GThread *transfer_thread;
+
+  gboolean running;
+
+  GstAdaptiveDemuxClock *clock;
+
+  GMainContext *transfer_context;
+  GMainLoop *loop;
+  SoupSession *session;
+
+  GMutex transfer_lock;
+  GArray *active_transfers;
+
+  GAsyncQueue *transfer_requests;
+  GSource *transfer_requests_source;
+
+  gchar *referer;
+  gchar *user_agent;
+  gchar **cookies;
+};
+
+struct DownloadHelperTransfer
+{
+  DownloadHelper *dh;
+
+  gboolean blocking;
+  gboolean complete;
+  gboolean progress_pending;
+
+  GCond cond;
+
+  GCancellable *cancellable;
+
+  SoupMessage *msg;
+  gboolean request_sent;
+
+  /* Current read buffer */
+  char *read_buffer;
+  guint64 read_buffer_size;
+  guint64 read_position;        /* Start in bytes of the read_buffer */
+
+  DownloadRequest *request;
+};
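+
+/*
+ * Rough lifecycle sketch (descriptive only): each download request is wrapped
+ * in a DownloadHelperTransfer held by a GTask, appended to active_transfers
+ * and driven from the transfer thread's GMainContext. Data is read in
+ * CHUNK_BUFFER_SIZE chunks into read_buffer, handed to the request as
+ * GstBuffers, and progress/completion are despatched back through the
+ * request's callbacks; blocking callers instead wait on the per-transfer
+ * cond.
+ */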
+
+static void
+free_transfer (DownloadHelperTransfer * transfer)
+{
+  DownloadRequest *request = transfer->request;
+
+  if (request)
+    download_request_unref (request);
+
+  if (transfer->blocking)
+    g_cond_clear (&transfer->cond);
+
+  g_object_unref (transfer->msg);
+  g_free (transfer->read_buffer);
+  g_free (transfer);
+}
+
+static void
+transfer_completion_cb (gpointer src_object, GAsyncResult * res,
+    gpointer user_data)
+{
+  DownloadHelperTransfer *transfer = g_task_get_task_data ((GTask *) res);
+  DownloadRequest *request = transfer->request;
+
+  if (transfer->blocking)
+    return;                     /* Somehow a completion got signalled for a blocking request */
+
+  download_request_lock (request);
+  request->in_use = FALSE;
+  GST_LOG ("Despatching completion for transfer %p request %p", transfer,
+      request);
+  download_request_despatch_completion (request);
+  download_request_unlock (request);
+}
+
+static gboolean
+transfer_report_progress_cb (gpointer task)
+{
+  DownloadHelperTransfer *transfer;
+  DownloadRequest *request;
+
+  /* Already completed - late callback */
+  if (g_task_get_completed (task))
+    return FALSE;
+
+  transfer = g_task_get_task_data (task);
+  request = transfer->request;
+
+  download_request_lock (request);
+  if (request->send_progress) {
+    GST_LOG ("Despatching progress for transfer %p request %p", transfer,
+        request);
+    download_request_despatch_progresss (request);
+  }
+  transfer->progress_pending = FALSE;
+  download_request_unlock (request);
+
+  return FALSE;
+}
+
+static GTask *
+transfer_task_new (DownloadHelper * dh, DownloadRequest * request,
+    SoupMessage * msg, gboolean blocking)
+{
+  GTask *transfer_task = NULL;
+  DownloadHelperTransfer *transfer = g_new0 (DownloadHelperTransfer, 1);
+
+  transfer->blocking = blocking;
+  if (transfer->blocking)
+    g_cond_init (&transfer->cond);
+
+  transfer->cancellable = g_cancellable_new ();
+  transfer->request = download_request_ref (request);
+
+  transfer->dh = dh;
+  transfer->msg = msg;
+
+  transfer_task =
+      g_task_new (NULL, transfer->cancellable,
+      (GAsyncReadyCallback) transfer_completion_cb, NULL);
+  g_task_set_task_data (transfer_task, transfer,
+      (GDestroyNotify) free_transfer);
+
+  return transfer_task;
+}
+
+static void
+release_transfer_task_by_ref (GTask ** transfer_task)
+{
+  g_object_unref (*transfer_task);
+}
+
+/* Called with download_request lock held */
+static void
+transfer_task_report_progress (GTask * transfer_task)
+{
+  DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+  DownloadRequest *request = transfer->request;
+  GSource *idle_source;
+
+  if (transfer->progress_pending == TRUE || !request->send_progress)
+    return;
+
+  /* There's no progress cb pending and this download wants reports, so
+   * attach an idle source */
+  transfer->progress_pending = TRUE;
+  idle_source = g_idle_source_new ();
+  g_task_attach_source (transfer_task, idle_source,
+      transfer_report_progress_cb);
+  g_source_unref (idle_source);
+}
+
+static void
+finish_transfer_task (DownloadHelper * dh, GTask * transfer_task,
+    GError * error)
+{
+  int i;
+
+  g_mutex_lock (&dh->transfer_lock);
+  for (i = dh->active_transfers->len - 1; i >= 0; i--) {
+    if (transfer_task == g_array_index (dh->active_transfers, GTask *, i)) {
+      DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+
+      transfer->complete = TRUE;
+
+      if (transfer->blocking)
+        g_cond_broadcast (&transfer->cond);
+      else if (error != NULL)
+        g_task_return_error (transfer_task, error);
+      else
+        g_task_return_boolean (transfer_task, TRUE);
+
+      /* This drops the task ref: */
+      g_array_remove_index_fast (dh->active_transfers, i);
+      g_mutex_unlock (&dh->transfer_lock);
+      return;
+    }
+  }
+  g_mutex_unlock (&dh->transfer_lock);
+
+  GST_WARNING ("Did not find transfer %p in the active transfer list",
+      transfer_task);
+}
+
+static gboolean
+new_read_buffer (DownloadHelperTransfer * transfer)
+{
+  gint buffer_size = CHUNK_BUFFER_SIZE;
+#if 0
+  DownloadRequest *request = transfer->request;
+
+  if (request->range_end != -1) {
+    if (request->range_end <= transfer->read_position) {
+      transfer->read_buffer = NULL;
+      transfer->read_buffer_size = 0;
+      return FALSE;
+    }
+    if (request->range_end - transfer->read_position < buffer_size)
+      buffer_size = request->range_end - transfer->read_position + 1;
+  }
+#endif
+
+  transfer->read_buffer = g_new (char, buffer_size);
+  transfer->read_buffer_size = buffer_size;
+  return TRUE;
+}
+
+static void
+on_read_ready (GObject * source, GAsyncResult * result, gpointer user_data)
+{
+  GTask *transfer_task = user_data;
+  DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+
+  DownloadHelper *dh = transfer->dh;
+  DownloadRequest *request = transfer->request;
+
+  GInputStream *in = G_INPUT_STREAM (source);
+  GError *error = NULL;
+  gsize bytes_read = 0;
+
+  GstClockTime now = gst_adaptive_demux_clock_get_time (dh->clock);
+
+  g_input_stream_read_all_finish (in, result, &bytes_read, &error);
+
+  download_request_lock (request);
+
+  if (error) {
+    g_free (transfer->read_buffer);
+    transfer->read_buffer = NULL;
+
+    if (!g_cancellable_is_cancelled (transfer->cancellable)) {
+      GST_ERROR ("Failed to read stream: %s", error->message);
+      if (request->state != DOWNLOAD_REQUEST_STATE_UNSENT)
+        request->state = DOWNLOAD_REQUEST_STATE_ERROR;
+      finish_transfer_task (dh, transfer_task, error);
+    } else {
+      /* Ignore error from cancelled operation */
+      g_error_free (error);
+      finish_transfer_task (dh, transfer_task, NULL);
+    }
+    download_request_unlock (request);
+
+    return;
+  }
+
+  if (!bytes_read) {
+    goto finish_transfer;
+  } else {
+    GstBuffer *gst_buffer =
+        gst_buffer_new_wrapped (transfer->read_buffer, bytes_read);
+
+    GST_BUFFER_OFFSET (gst_buffer) = transfer->read_position;
+    transfer->read_position += bytes_read;
+    transfer->read_buffer = NULL;
+
+    /* Clip the buffer to within the range */
+    if (GST_BUFFER_OFFSET (gst_buffer) < request->range_start) {
+      if (transfer->read_position <= request->range_start) {
+        GST_DEBUG ("Discarding %" G_GSIZE_FORMAT
+            " bytes entirely before requested range",
+            gst_buffer_get_size (gst_buffer));
+        /* This buffer is completely before the range start, discard it */
+        gst_buffer_unref (gst_buffer);
+        gst_buffer = NULL;
+      } else {
+        GST_DEBUG ("Clipping first %" G_GINT64_FORMAT
+            " bytes before requested range",
+            request->range_start - GST_BUFFER_OFFSET (gst_buffer));
+
+        /* This buffer is partially within the requested range, clip the beginning */
+        gst_buffer_resize (gst_buffer,
+            request->range_start - GST_BUFFER_OFFSET (gst_buffer), -1);
+        GST_BUFFER_OFFSET (gst_buffer) = request->range_start;
+      }
+    }
+
+    if (request->download_start_time == GST_CLOCK_TIME_NONE) {
+      GST_LOG ("Got first data for URI %s", request->uri);
+      request->download_start_time = now;
+    }
+
+    if (gst_buffer != NULL) {
+      /* Unsent means cancellation is in progress, so don't override
+       * the state. Otherwise make sure it is LOADING */
+      if (request->state != DOWNLOAD_REQUEST_STATE_UNSENT)
+        request->state = DOWNLOAD_REQUEST_STATE_LOADING;
+
+      GST_LOG ("Adding %u bytes to buffer",
+          (guint) (gst_buffer_get_size (gst_buffer)));
+
+      download_request_add_buffer (request, gst_buffer);
+
+      transfer_task_report_progress (transfer_task);
+    }
+
+    /* Resubmit the read request to get more */
+    if (!new_read_buffer (transfer))
+      goto finish_transfer;
+
+    g_main_context_push_thread_default (dh->transfer_context);
+    g_input_stream_read_all_async (in, transfer->read_buffer,
+        transfer->read_buffer_size, G_PRIORITY_DEFAULT, transfer->cancellable,
+        on_read_ready, transfer_task);
+    g_main_context_pop_thread_default (dh->transfer_context);
+  }
+
+  download_request_unlock (request);
+  return;
+
+finish_transfer:
+  if (request->in_use && !g_cancellable_is_cancelled (transfer->cancellable)) {
+    SoupStatus status_code = _soup_message_get_status (transfer->msg);
+
+    GST_LOG ("request complete. Code %d URI %s range %" G_GINT64_FORMAT " %"
+        G_GINT64_FORMAT, status_code, request->uri,
+        request->range_start, request->range_end);
+
+    if (SOUP_STATUS_IS_SUCCESSFUL (status_code)
+        || SOUP_STATUS_IS_REDIRECTION (status_code))
+      request->state = DOWNLOAD_REQUEST_STATE_COMPLETE;
+    else
+      request->state = DOWNLOAD_REQUEST_STATE_ERROR;
+  }
+  request->download_end_time = now;
+
+  g_free (transfer->read_buffer);
+  transfer->read_buffer = NULL;
+
+  download_request_unlock (request);
+
+  finish_transfer_task (dh, transfer_task, NULL);
+}
+
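+/* Accumulate one HTTP header into the GstStructure, collapsing repeated
+ * headers of the same name into a GstValueArray of strings */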
+static void
+http_header_to_structure (const gchar * name, const gchar * value,
+    gpointer user_data)
+{
+  GstStructure *headers = user_data;
+  const GValue *gv;
+
+  if (!g_utf8_validate (name, -1, NULL) || !g_utf8_validate (value, -1, NULL))
+    return;
+
+  gv = gst_structure_get_value (headers, name);
+  if (gv && GST_VALUE_HOLDS_ARRAY (gv)) {
+    GValue v = G_VALUE_INIT;
+
+    g_value_init (&v, G_TYPE_STRING);
+    g_value_set_string (&v, value);
+    gst_value_array_append_value ((GValue *) gv, &v);
+    g_value_unset (&v);
+  } else if (gv && G_VALUE_HOLDS_STRING (gv)) {
+    GValue arr = G_VALUE_INIT;
+    GValue v = G_VALUE_INIT;
+    const gchar *old_value = g_value_get_string (gv);
+
+    g_value_init (&arr, GST_TYPE_ARRAY);
+    g_value_init (&v, G_TYPE_STRING);
+    g_value_set_string (&v, old_value);
+    gst_value_array_append_value (&arr, &v);
+    g_value_set_string (&v, value);
+    gst_value_array_append_value (&arr, &v);
+
+    gst_structure_set_value (headers, name, &arr);
+    g_value_unset (&v);
+    g_value_unset (&arr);
+  } else {
+    gst_structure_set (headers, name, G_TYPE_STRING, value, NULL);
+  }
+}
+
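+/* "restarted" signal handler: record the redirect target (and whether the
+ * redirect is permanent) on the request so callers can learn the final URI */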
+static void
+soup_msg_restarted_cb (SoupMessage * msg, gpointer user_data)
+{
+  GTask *transfer_task = user_data;
+  DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+  DownloadRequest *request = transfer->request;
+  SoupStatus status = _soup_message_get_status (msg);
+
+  if (SOUP_STATUS_IS_REDIRECTION (status)) {
+    char *redirect_uri = gst_soup_message_uri_to_string (msg);
+    gboolean redirect_permanent = (status == SOUP_STATUS_MOVED_PERMANENTLY);
+
+    GST_DEBUG ("%u redirect to \"%s\" (permanent %d)",
+        status, redirect_uri, redirect_permanent);
+
+    download_request_lock (request);
+    g_free (request->redirect_uri);
+    request->redirect_uri = redirect_uri;
+    request->redirect_permanent = redirect_permanent;
+    download_request_unlock (request);
+  }
+}
+
+static GstStructure *
+handle_response_headers (DownloadHelperTransfer * transfer)
+{
+  DownloadRequest *request = transfer->request;
+  SoupMessage *msg = transfer->msg;
+  SoupMessageHeaders *response_headers;
+  GstStructure *http_headers, *headers;
+
+  http_headers = gst_structure_new_empty ("http-headers");
+
+#if 0
+  if (msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED &&
+      src->proxy_id && src->proxy_pw) {
+    /* wait for authenticate callback */
+    return GST_FLOW_OK;
+  }
+
+  if (src->redirection_uri)
+    gst_structure_set (http_headers, "redirection-uri", G_TYPE_STRING,
+        src->redirection_uri, NULL);
+#endif
+
+  headers = gst_structure_new_empty ("request-headers");
+  _soup_message_headers_foreach (_soup_message_get_request_headers (msg),
+      http_header_to_structure, headers);
+  gst_structure_set (http_headers, "request-headers", GST_TYPE_STRUCTURE,
+      headers, NULL);
+  gst_structure_free (headers);
+  headers = gst_structure_new_empty ("response-headers");
+  response_headers = _soup_message_get_response_headers (msg);
+  _soup_message_headers_foreach (response_headers, http_header_to_structure,
+      headers);
+  gst_structure_set (http_headers, "response-headers", GST_TYPE_STRUCTURE,
+      headers, NULL);
+  gst_structure_free (headers);
+
+#if 0
+  if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
+    /* force an error */
+    gst_structure_free (http_headers);
+    return gst_soup_http_src_parse_status (msg, src);
+  }
+#endif
+
+  /* Parse Content-Length. */
+  if (SOUP_STATUS_IS_SUCCESSFUL (_soup_message_get_status (msg)) &&
+      (_soup_message_headers_get_encoding (response_headers) ==
+          SOUP_ENCODING_CONTENT_LENGTH)) {
+    request->content_length =
+        _soup_message_headers_get_content_length (response_headers);
+  }
+  /* Parse Content-Range in a partial content response to set our initial read_position */
+  transfer->read_position = 0;
+  if (_soup_message_get_status (msg) == SOUP_STATUS_PARTIAL_CONTENT) {
+    goffset start, end;
+    if (_soup_message_headers_get_content_range (response_headers, &start,
+            &end, NULL)) {
+      GST_DEBUG ("Content-Range response %" G_GOFFSET_FORMAT "-%"
+          G_GOFFSET_FORMAT, start, end);
+      transfer->read_position = start;
+    }
+  }
+  if (transfer->read_position != request->range_start) {
+    GST_WARNING ("Server did not respect our range request for range %"
+        G_GINT64_FORMAT " to %" G_GINT64_FORMAT " - starting at offset %"
+        G_GUINT64_FORMAT, request->range_start, request->range_end,
+        transfer->read_position);
+  }
+
+  return http_headers;
+}
+
+static void
+on_request_sent (GObject * source, GAsyncResult * result, gpointer user_data)
+{
+  GTask *transfer_task = user_data;
+  DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+
+  DownloadHelper *dh = transfer->dh;
+  DownloadRequest *request = transfer->request;
+  SoupMessage *msg = transfer->msg;
+  GError *error = NULL;
+
+  GInputStream *in =
+      _soup_session_send_finish ((SoupSession *) source, result, &error);
+
+  download_request_lock (request);
+
+  if (in == NULL) {
+    request->status_code = _soup_message_get_status (msg);
+
+    if (!g_cancellable_is_cancelled (transfer->cancellable)) {
+      GST_LOG ("request errored. Code %d URI %s range %" G_GINT64_FORMAT " %"
+          G_GINT64_FORMAT, request->status_code, request->uri,
+          request->range_start, request->range_end);
+
+      if (request->state != DOWNLOAD_REQUEST_STATE_UNSENT)
+        request->state = DOWNLOAD_REQUEST_STATE_ERROR;
+      finish_transfer_task (dh, transfer_task, error);
+    } else {
+      /* Ignore error from cancelled operation */
+      g_error_free (error);
+      finish_transfer_task (dh, transfer_task, NULL);
+    }
+    download_request_unlock (request);
+
+    /* No async callback queued - the transfer was already finished above */
+    return;
+  }
+
+  /* If the state went back to UNSENT, we were cancelled so don't override it */
+  if (request->state != DOWNLOAD_REQUEST_STATE_UNSENT &&
+      request->state != DOWNLOAD_REQUEST_STATE_HEADERS_RECEIVED) {
+
+    request->state = DOWNLOAD_REQUEST_STATE_HEADERS_RECEIVED;
+    request->status_code = _soup_message_get_status (msg);
+    request->headers = handle_response_headers (transfer);
+
+    if (SOUP_STATUS_IS_SUCCESSFUL (request->status_code)
+        || SOUP_STATUS_IS_REDIRECTION (request->status_code)) {
+      request->state = DOWNLOAD_REQUEST_STATE_HEADERS_RECEIVED;
+      transfer_task_report_progress (transfer_task);
+    } else {
+      goto finish_transfer_error;
+    }
+  }
+
+  if (!new_read_buffer (transfer))
+    goto finish_transfer_error;
+
+  download_request_unlock (request);
+
+  g_main_context_push_thread_default (dh->transfer_context);
+  g_input_stream_read_all_async (in, transfer->read_buffer,
+      transfer->read_buffer_size, G_PRIORITY_DEFAULT, transfer->cancellable,
+      on_read_ready, transfer_task);
+  g_main_context_pop_thread_default (dh->transfer_context);
+
+  g_object_unref (in);
+  return;
+
+finish_transfer_error:
+  request->download_end_time = gst_adaptive_demux_clock_get_time (dh->clock);
+
+  if (request->in_use && !g_cancellable_is_cancelled (transfer->cancellable)) {
+    GST_LOG ("request complete. Code %d URI %s range %" G_GINT64_FORMAT " %"
+        G_GINT64_FORMAT, _soup_message_get_status (msg), request->uri,
+        request->range_start, request->range_end);
+    if (request->state != DOWNLOAD_REQUEST_STATE_UNSENT)
+      request->state = DOWNLOAD_REQUEST_STATE_ERROR;
+  }
+
+  g_free (transfer->read_buffer);
+  transfer->read_buffer = NULL;
+
+  download_request_unlock (request);
+  finish_transfer_task (dh, transfer_task, NULL);
+  g_object_unref (in);
+}
+
+DownloadHelper *
+downloadhelper_new (GstAdaptiveDemuxClock * clock)
+{
+  DownloadHelper *dh = g_new0 (DownloadHelper, 1);
+
+  dh->transfer_context = g_main_context_new ();
+  dh->loop = g_main_loop_new (dh->transfer_context, FALSE);
+
+  dh->clock = gst_adaptive_demux_clock_ref (clock);
+
+  g_mutex_init (&dh->transfer_lock);
+  dh->active_transfers = g_array_new (FALSE, FALSE, sizeof (GTask *));
+
+  g_array_set_clear_func (dh->active_transfers,
+      (GDestroyNotify) (release_transfer_task_by_ref));
+
+  dh->transfer_requests =
+      g_async_queue_new_full ((GDestroyNotify) g_object_unref);
+  dh->transfer_requests_source = NULL;
+
+  /* Set a 10 second timeout. Anything that takes longer is most likely
+   * a stalled attempt to reuse an already-closed connection */
+  dh->session = _soup_session_new_with_options ("timeout", 10, NULL);
+
+  return dh;
+}
+
+void
+downloadhelper_free (DownloadHelper * dh)
+{
+  downloadhelper_stop (dh);
+
+  if (dh->session)
+    g_object_unref (dh->session);
+  g_main_loop_unref (dh->loop);
+  g_main_context_unref (dh->transfer_context);
+
+  if (dh->clock)
+    gst_adaptive_demux_clock_unref (dh->clock);
+
+  g_array_free (dh->active_transfers, TRUE);
+  g_async_queue_unref (dh->transfer_requests);
+
+  g_free (dh->referer);
+  g_free (dh->user_agent);
+  g_strfreev (dh->cookies);
+
+  g_free (dh);
+}
+
+void
+downloadhelper_set_referer (DownloadHelper * dh, const gchar * referer)
+{
+  g_mutex_lock (&dh->transfer_lock);
+  g_free (dh->referer);
+  dh->referer = g_strdup (referer);
+  g_mutex_unlock (&dh->transfer_lock);
+}
+
+void
+downloadhelper_set_user_agent (DownloadHelper * dh, const gchar * user_agent)
+{
+  g_mutex_lock (&dh->transfer_lock);
+  g_free (dh->user_agent);
+  dh->user_agent = g_strdup (user_agent);
+  g_mutex_unlock (&dh->transfer_lock);
+}
+
+/* Takes ownership of the strv */
+void
+downloadhelper_set_cookies (DownloadHelper * dh, gchar ** cookies)
+{
+  g_mutex_lock (&dh->transfer_lock);
+  g_strfreev (dh->cookies);
+  dh->cookies = cookies;
+  g_mutex_unlock (&dh->transfer_lock);
+}
+
+/* Called with the transfer lock held */
+static void
+submit_transfer (DownloadHelper * dh, GTask * transfer_task)
+{
+  DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+  DownloadRequest *request = transfer->request;
+
+  download_request_lock (request);
+  request->state = DOWNLOAD_REQUEST_STATE_OPEN;
+  request->download_request_time =
+      gst_adaptive_demux_clock_get_time (dh->clock);
+
+  GST_LOG ("Submitting request URI %s range %" G_GINT64_FORMAT " %"
+      G_GINT64_FORMAT, request->uri, request->range_start, request->range_end);
+
+  transfer_task_report_progress (transfer_task);
+  download_request_unlock (request);
+
+  _soup_session_send_async (dh->session, transfer->msg, transfer->cancellable,
+      on_request_sent, transfer_task);
+  g_array_append_val (dh->active_transfers, transfer_task);
+}
+
+/* Idle callback that submits all pending transfers */
+static gboolean
+submit_transfers_cb (DownloadHelper * dh)
+{
+  GTask *transfer;
+
+  g_mutex_lock (&dh->transfer_lock);
+  do {
+    transfer = g_async_queue_try_pop (dh->transfer_requests);
+    if (transfer) {
+      submit_transfer (dh, transfer);
+    }
+  } while (transfer != NULL);
+
+  /* FIXME: Use a PollFD like GWakeup instead? */
+  g_source_destroy (dh->transfer_requests_source);
+  g_source_unref (dh->transfer_requests_source);
+  dh->transfer_requests_source = NULL;
+
+  g_mutex_unlock (&dh->transfer_lock);
+
+  return G_SOURCE_REMOVE;
+}
+
+static gpointer
+dh_transfer_thread_func (gpointer data)
+{
+  DownloadHelper *dh = data;
+  GST_DEBUG ("DownloadHelper thread starting");
+
+  g_main_context_push_thread_default (dh->transfer_context);
+  g_main_loop_run (dh->loop);
+  g_main_context_pop_thread_default (dh->transfer_context);
+
+  GST_DEBUG ("Exiting DownloadHelper thread");
+  return NULL;
+}
+
+gboolean
+downloadhelper_start (DownloadHelper * dh)
+{
+  g_return_val_if_fail (dh->transfer_thread == NULL, FALSE);
+
+  g_mutex_lock (&dh->transfer_lock);
+  if (!dh->running) {
+
+    dh->transfer_thread =
+        g_thread_try_new ("adaptive-download-task", dh_transfer_thread_func, dh,
+        NULL);
+    dh->running = (dh->transfer_thread != NULL);
+  }
+  g_mutex_unlock (&dh->transfer_lock);
+
+  return dh->running;
+}
+
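+/* Cancel any active transfers, quit the transfer loop and join its thread,
+ * then wake up any remaining (blocking) waiters with their requests reset
+ * to the UNSENT state */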
+void
+downloadhelper_stop (DownloadHelper * dh)
+{
+  int i;
+  GThread *transfer_thread = NULL;
+
+  GST_DEBUG ("Stopping DownloadHelper loop");
+
+  g_mutex_lock (&dh->transfer_lock);
+
+  dh->running = FALSE;
+
+  for (i = 0; i < dh->active_transfers->len; i++) {
+    GTask *transfer_task = g_array_index (dh->active_transfers, GTask *, i);
+    DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+    g_cancellable_cancel (transfer->cancellable);
+  }
+
+  g_main_loop_quit (dh->loop);
+
+  transfer_thread = dh->transfer_thread;
+  dh->transfer_thread = NULL;
+
+  g_mutex_unlock (&dh->transfer_lock);
+
+  if (transfer_thread != NULL) {
+    g_thread_join (transfer_thread);
+  }
+
+  /* The transfer thread has exited at this point - any remaining transfers are unfinished
+   * and need cleaning up */
+  g_mutex_lock (&dh->transfer_lock);
+
+  for (i = 0; i < dh->active_transfers->len; i++) {
+    GTask *transfer_task = g_array_index (dh->active_transfers, GTask *, i);
+    DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+    DownloadRequest *request = transfer->request;
+
+    download_request_lock (request);
+    /* Reset the state to UNSENT, to indicate cancellation, like an XMLHttpRequest does */
+    request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+    download_request_unlock (request);
+
+    transfer->complete = TRUE;
+    if (transfer->blocking)
+      g_cond_broadcast (&transfer->cond);
+    else
+      g_task_return_boolean (transfer_task, TRUE);
+  }
+
+  g_array_set_size (dh->active_transfers, 0);
+  g_mutex_unlock (&dh->transfer_lock);
+}
+
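+/* Build the SoupMessage for the request, apply range / Referer / User-Agent /
+ * Cookie headers and queue it for the transfer thread. When
+ * DOWNLOAD_FLAG_BLOCKING is set, also wait here until the transfer completes */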
+gboolean
+downloadhelper_submit_request (DownloadHelper * dh,
+    const gchar * referer, DownloadFlags flags, DownloadRequest * request,
+    GError ** err)
+{
+  GTask *transfer_task = NULL;
+  const gchar *method;
+  SoupMessage *msg;
+  SoupMessageHeaders *msg_headers;
+  gboolean blocking = (flags & DOWNLOAD_FLAG_BLOCKING) != 0;
+
+  method =
+      (flags & DOWNLOAD_FLAG_HEADERS_ONLY) ? SOUP_METHOD_HEAD : SOUP_METHOD_GET;
+
+  download_request_lock (request);
+  if (request->in_use) {
+    GST_ERROR ("Request for URI %s reusing active request object",
+        request->uri);
+    download_request_unlock (request);
+    return FALSE;
+  }
+
+  /* Clear the state back to unsent */
+  request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+
+  msg = _soup_message_new (method, request->uri);
+  if (msg == NULL) {
+    g_set_error (err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Could not parse download URI %s", request->uri);
+
+    request->state = DOWNLOAD_REQUEST_STATE_ERROR;
+    download_request_unlock (request);
+
+    return FALSE;
+  }
+
+  /* NOTE: There was a bug where Akamai servers returned the
+   * wrong result for a range request on small files. To work
+   * around it, if the range starts within the first KB of the
+   * file just start at 0 instead */
+  if (request->range_start < 1024)
+    request->range_start = 0;
+
+  msg_headers = _soup_message_get_request_headers (msg);
+
+  if (request->range_start != 0 || request->range_end != -1) {
+    _soup_message_headers_set_range (msg_headers, request->range_start,
+        request->range_end);
+  }
+
+  download_request_unlock (request);
+
+  /* If resubmitting a request, clear any stale / unused data */
+  download_request_begin_download (request);
+
+  if ((flags & DOWNLOAD_FLAG_COMPRESS) == 0) {
+    _soup_message_disable_feature (msg, _soup_content_decoder_get_type ());
+  }
+  if (flags & DOWNLOAD_FLAG_FORCE_REFRESH) {
+    _soup_message_headers_append (msg_headers, "Cache-Control", "max-age=0");
+  }
+
+  /* Take the lock to protect header strings */
+  g_mutex_lock (&dh->transfer_lock);
+
+  if (referer != NULL) {
+    _soup_message_headers_append (msg_headers, "Referer", referer);
+  } else if (dh->referer != NULL) {
+    _soup_message_headers_append (msg_headers, "Referer", dh->referer);
+  }
+
+  if (dh->user_agent != NULL) {
+    _soup_message_headers_append (msg_headers, "User-Agent", dh->user_agent);
+  }
+
+  if (dh->cookies != NULL) {
+    gchar **cookie;
+
+    for (cookie = dh->cookies; *cookie != NULL; cookie++) {
+      _soup_message_headers_append (msg_headers, "Cookie", *cookie);
+    }
+  }
+
+  transfer_task = transfer_task_new (dh, request, msg, blocking);
+
+  if (!dh->running) {
+    /* The download helper was deactivated just as we went to dispatch this request.
+     * Abort and manually wake the request, as it never went into the active_transfers list */
+    g_mutex_unlock (&dh->transfer_lock);
+
+    download_request_lock (request);
+    request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+    request->in_use = FALSE;
+    download_request_unlock (request);
+
+    g_cancellable_cancel (g_task_get_cancellable (transfer_task));
+    g_task_return_error_if_cancelled (transfer_task);
+    g_object_unref (transfer_task);
+
+    return FALSE;
+  }
+
+  download_request_lock (request);
+  request->in_use = TRUE;
+  download_request_unlock (request);
+
+  g_signal_connect (msg, "restarted", G_CALLBACK (soup_msg_restarted_cb),
+      transfer_task);
+
+  /* Now send the request over to the main loop for actual submission */
+  GST_LOG ("Submitting transfer task %p", transfer_task);
+  g_async_queue_push (dh->transfer_requests, transfer_task);
+
+  /* No pending idle source to wake the transfer loop - so create one */
+  if (dh->transfer_requests_source == NULL) {
+    dh->transfer_requests_source = g_idle_source_new ();
+    g_source_set_callback (dh->transfer_requests_source,
+        (GSourceFunc) submit_transfers_cb, dh, NULL);
+    g_source_attach (dh->transfer_requests_source, dh->transfer_context);
+  }
+
+  if (blocking) {
+    DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+
+    /* We need an extra ref on the task to make sure it stays alive.
+     * We pushed it in the async queue, but didn't unlock yet, so while
+     * we gave away our ref, the receiver can't have unreffed it */
+    g_object_ref (transfer_task);
+    while (!transfer->complete)
+      g_cond_wait (&transfer->cond, &dh->transfer_lock);
+    g_object_unref (transfer_task);
+  }
+
+  g_mutex_unlock (&dh->transfer_lock);
+
+  return TRUE;
+}
+
+void
+downloadhelper_cancel_request (DownloadHelper * dh, DownloadRequest * request)
+{
+  int i;
+
+  g_mutex_lock (&dh->transfer_lock);
+
+  download_request_lock (request);
+  if (!request->in_use)
+    goto out;
+
+  GST_DEBUG ("Cancelling request for URI %s range %" G_GINT64_FORMAT " %"
+      G_GINT64_FORMAT, request->uri, request->range_start, request->range_end);
+
+  request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+
+  for (i = dh->active_transfers->len - 1; i >= 0; i--) {
+    GTask *transfer_task = g_array_index (dh->active_transfers, GTask *, i);
+    DownloadHelperTransfer *transfer = g_task_get_task_data (transfer_task);
+
+    if (transfer->request == request) {
+      GST_DEBUG ("Found transfer %p for request for URI %s range %"
+          G_GINT64_FORMAT " %" G_GINT64_FORMAT, transfer, request->uri,
+          request->range_start, request->range_end);
+      g_cancellable_cancel (transfer->cancellable);
+      break;
+    }
+  }
+
+out:
+  download_request_unlock (request);
+  g_mutex_unlock (&dh->transfer_lock);
+}
+
+DownloadRequest *
+downloadhelper_fetch_uri_range (DownloadHelper * dh, const gchar * uri,
+    const gchar * referer, DownloadFlags flags, gint64 range_start,
+    gint64 range_end, GError ** err)
+{
+  DownloadRequest *request;
+
+  g_return_val_if_fail (uri != NULL, NULL);
+
+  GST_DEBUG ("Fetching URI %s range %" G_GINT64_FORMAT " %" G_GINT64_FORMAT,
+      uri, range_start, range_end);
+
+  flags |= DOWNLOAD_FLAG_BLOCKING;
+
+  request = download_request_new_uri_range (uri, range_start, range_end);
+
+  if (!downloadhelper_submit_request (dh, referer, flags, request, err)) {
+    download_request_unref (request);
+    return NULL;
+  }
+
+  return request;
+}
+
+DownloadRequest *
+downloadhelper_fetch_uri (DownloadHelper * dh, const gchar * uri,
+    const gchar * referer, DownloadFlags flags, GError ** err)
+{
+  return downloadhelper_fetch_uri_range (dh, uri, referer, flags, 0, -1, err);
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadhelper.h
new file mode 100644 (file)
index 0000000..a5abdeb
--- /dev/null
@@ -0,0 +1,67 @@
+/* GStreamer
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * downloadhelper.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <glib.h>
+
+#include "gstadaptivedemuxutils.h"
+#include "downloadrequest.h"
+
+#ifndef __DOWNLOADHELPER_H__
+#define __DOWNLOADHELPER_H__
+
+G_BEGIN_DECLS
+
+typedef struct DownloadHelper DownloadHelper;
+typedef enum DownloadFlags DownloadFlags;
+
+#define HTTP_STATUS_IS_SUCCESSFUL(s) ((s) >= 200 && (s) < 300)
+
+enum DownloadFlags
+{
+  DOWNLOAD_FLAG_NONE = 0,
+  DOWNLOAD_FLAG_COMPRESS = (1 << 0),
+  DOWNLOAD_FLAG_FORCE_REFRESH = (1 << 1),
+  DOWNLOAD_FLAG_HEADERS_ONLY = (1 << 2),
+  DOWNLOAD_FLAG_BLOCKING = (1 << 3),
+};
+
+DownloadHelper *downloadhelper_new (GstAdaptiveDemuxClock *clock);
+
+gboolean downloadhelper_start (DownloadHelper * dh);
+void downloadhelper_stop (DownloadHelper * dh);
+
+void downloadhelper_free (DownloadHelper * dh);
+
+void downloadhelper_set_referer (DownloadHelper * dh, const gchar *referer);
+void downloadhelper_set_user_agent (DownloadHelper * dh, const gchar *user_agent);
+void downloadhelper_set_cookies (DownloadHelper * dh, gchar **cookies);
+
+gboolean downloadhelper_submit_request (DownloadHelper * dh,
+    const gchar * referer, DownloadFlags flags, DownloadRequest * request,
+    GError ** err);
+void downloadhelper_cancel_request (DownloadHelper * dh, DownloadRequest *request);
+
+DownloadRequest *downloadhelper_fetch_uri (DownloadHelper * dh, const gchar * uri,
+    const gchar * referer, DownloadFlags flags, GError ** err);
+DownloadRequest *downloadhelper_fetch_uri_range (DownloadHelper * dh,
+    const gchar * uri, const gchar * referer, DownloadFlags flags,
+    gint64 range_start, gint64 range_end, GError ** err);
+
+G_END_DECLS
+#endif
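
A minimal usage sketch of the helper API declared above (illustrative only: it
assumes a valid GstAdaptiveDemuxClock in `clock` obtained elsewhere, and the
URI is a placeholder):

    GError *error = NULL;
    DownloadHelper *dh = downloadhelper_new (clock);

    downloadhelper_start (dh);

    /* downloadhelper_fetch_uri() adds DOWNLOAD_FLAG_BLOCKING internally,
     * so this call returns once the transfer has finished */
    DownloadRequest *req = downloadhelper_fetch_uri (dh,
        "https://example.com/manifest.mpd", NULL, DOWNLOAD_FLAG_NONE, &error);

    if (req != NULL) {
      if (req->state == DOWNLOAD_REQUEST_STATE_COMPLETE) {
        GstBuffer *data = download_request_take_buffer (req);
        /* ... parse the downloaded data ... */
        if (data)
          gst_buffer_unref (data);
      }
      download_request_unref (req);
    } else {
      g_clear_error (&error);
    }

    downloadhelper_stop (dh);
    downloadhelper_free (dh);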
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.c
new file mode 100644 (file)
index 0000000..8b6b233
--- /dev/null
@@ -0,0 +1,396 @@
+/* GStreamer
+ * Copyright (C) 2011 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * downloadrequest.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib.h>
+#include <gst/base/gsttypefindhelper.h>
+#include <gst/base/gstadapter.h>
+#include "downloadrequest.h"
+
+typedef struct _DownloadRequestPrivate DownloadRequestPrivate;
+
+struct _DownloadRequestPrivate
+{
+  DownloadRequest request;
+
+  GstBuffer *buffer;
+  GstCaps *caps;
+  GRecMutex lock;
+
+  DownloadRequestEventCallback completion_cb;
+  DownloadRequestEventCallback cancellation_cb;
+  DownloadRequestEventCallback error_cb;
+  DownloadRequestEventCallback progress_cb;
+  void *cb_data;
+};
+
+#define DOWNLOAD_REQUEST_PRIVATE(frag) ((DownloadRequestPrivate *)(frag))
+
+static void download_request_free (DownloadRequest * request);
+
+DownloadRequest *
+download_request_new (void)
+{
+  DownloadRequest *request =
+      (DownloadRequest *) g_slice_new0 (DownloadRequestPrivate);
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+
+  g_atomic_int_set (&request->ref_count, 1);
+
+  g_rec_mutex_init (&priv->lock);
+
+  priv->buffer = NULL;
+
+  request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+  request->status_code = 0;
+
+  request->download_request_time = GST_CLOCK_TIME_NONE;
+  request->download_start_time = GST_CLOCK_TIME_NONE;
+  request->download_end_time = GST_CLOCK_TIME_NONE;
+  request->headers = NULL;
+
+  return (DownloadRequest *) (request);
+}
+
+DownloadRequest *
+download_request_new_uri (const gchar * uri)
+{
+  DownloadRequest *request = download_request_new ();
+
+  request->uri = g_strdup (uri);
+  request->range_start = 0;
+  request->range_end = -1;
+
+  return request;
+}
+
+DownloadRequest *
+download_request_new_uri_range (const gchar * uri, gint64 range_start,
+    gint64 range_end)
+{
+  DownloadRequest *request = download_request_new ();
+
+  request->uri = g_strdup (uri);
+  request->range_start = range_start;
+  request->range_end = range_end;
+
+  return request;
+}
+
+static void
+download_request_free (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+
+  g_free (request->uri);
+  g_free (request->redirect_uri);
+  if (request->headers) {
+    gst_structure_free (request->headers);
+    request->headers = NULL;
+  }
+
+  if (priv->buffer != NULL) {
+    gst_buffer_unref (priv->buffer);
+    priv->buffer = NULL;
+  }
+
+  if (priv->caps != NULL) {
+    gst_caps_unref (priv->caps);
+    priv->caps = NULL;
+  }
+
+  g_rec_mutex_clear (&priv->lock);
+
+  g_slice_free1 (sizeof (DownloadRequestPrivate), priv);
+}
+
+void
+download_request_set_callbacks (DownloadRequest * request,
+    DownloadRequestEventCallback on_completion,
+    DownloadRequestEventCallback on_error,
+    DownloadRequestEventCallback on_cancellation,
+    DownloadRequestEventCallback on_progress, void *cb_data)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  g_rec_mutex_lock (&priv->lock);
+  priv->completion_cb = on_completion;
+  priv->error_cb = on_error;
+  priv->cancellation_cb = on_cancellation;
+  priv->progress_cb = on_progress;
+  priv->cb_data = cb_data;
+
+  request->send_progress = (on_progress != NULL);
+
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+/* Called with request lock held */
+void
+download_request_despatch_progresss (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  if (priv->progress_cb)
+    priv->progress_cb (request, request->state, priv->cb_data);
+}
+
+/* Called with request lock held */
+void
+download_request_despatch_completion (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  switch (request->state) {
+    case DOWNLOAD_REQUEST_STATE_UNSENT:
+      if (priv->cancellation_cb)
+        priv->cancellation_cb (request, request->state, priv->cb_data);
+      break;
+    case DOWNLOAD_REQUEST_STATE_COMPLETE:
+      if (priv->completion_cb)
+        priv->completion_cb (request, request->state, priv->cb_data);
+      break;
+    case DOWNLOAD_REQUEST_STATE_ERROR:
+      if (priv->error_cb)
+        priv->error_cb (request, request->state, priv->cb_data);
+      break;
+    default:
+      g_assert_not_reached ();
+  }
+}
+
+
+DownloadRequest *
+download_request_ref (DownloadRequest * request)
+{
+  g_return_val_if_fail (request != NULL, NULL);
+  g_atomic_int_inc (&request->ref_count);
+
+  return request;
+}
+
+void
+download_request_unref (DownloadRequest * request)
+{
+  g_return_if_fail (request != NULL);
+
+  if (g_atomic_int_dec_and_test (&request->ref_count)) {
+    download_request_free (request);
+  }
+}
+
+void
+download_request_lock (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  g_rec_mutex_lock (&priv->lock);
+}
+
+void
+download_request_unlock (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+GstBuffer *
+download_request_take_buffer (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  GstBuffer *buffer = NULL;
+
+  g_return_val_if_fail (request != NULL, NULL);
+
+  g_rec_mutex_lock (&priv->lock);
+
+  if (request->state != DOWNLOAD_REQUEST_STATE_LOADING
+      && request->state != DOWNLOAD_REQUEST_STATE_COMPLETE) {
+    g_rec_mutex_unlock (&priv->lock);
+    return NULL;
+  }
+
+  buffer = priv->buffer;
+  priv->buffer = NULL;
+
+  g_rec_mutex_unlock (&priv->lock);
+
+  return buffer;
+}
+
+void
+download_request_set_uri (DownloadRequest * request, const gchar * uri,
+    gint64 range_start, gint64 range_end)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  g_rec_mutex_lock (&priv->lock);
+
+  g_assert (request->in_use == FALSE);
+
+  if (request->uri != uri) {
+    g_free (request->uri);
+    request->uri = g_strdup (uri);
+  }
+
+  g_free (request->redirect_uri);
+  request->redirect_uri = NULL;
+  request->redirect_permanent = FALSE;
+
+  request->range_start = range_start;
+  request->range_end = range_end;
+
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+void
+download_request_reset (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+
+  g_rec_mutex_lock (&priv->lock);
+  g_assert (request->in_use == FALSE);
+  request->state = DOWNLOAD_REQUEST_STATE_UNSENT;
+
+  if (request->headers) {
+    gst_structure_free (request->headers);
+    request->headers = NULL;
+  }
+
+  if (priv->buffer != NULL) {
+    gst_buffer_unref (priv->buffer);
+    priv->buffer = NULL;
+  }
+
+  if (priv->caps != NULL) {
+    gst_caps_unref (priv->caps);
+    priv->caps = NULL;
+  }
+
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+/* Called when the request is submitted, to clear any settings from a previous
+ * download */
+void
+download_request_begin_download (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+
+  g_return_if_fail (request != NULL);
+
+  g_rec_mutex_lock (&priv->lock);
+
+  if (priv->buffer != NULL) {
+    gst_buffer_unref (priv->buffer);
+    priv->buffer = NULL;
+  }
+
+  if (request->headers) {
+    gst_structure_free (request->headers);
+    request->headers = NULL;
+  }
+
+  if (priv->caps != NULL) {
+    gst_caps_unref (priv->caps);
+    priv->caps = NULL;
+  }
+
+  request->content_length = 0;
+  request->content_received = 0;
+
+  request->download_request_time = GST_CLOCK_TIME_NONE;
+  request->download_start_time = GST_CLOCK_TIME_NONE;
+  request->download_end_time = GST_CLOCK_TIME_NONE;
+
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+void
+download_request_set_caps (DownloadRequest * request, GstCaps * caps)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  g_return_if_fail (request != NULL);
+
+  g_rec_mutex_lock (&priv->lock);
+  gst_caps_replace (&priv->caps, caps);
+  g_rec_mutex_unlock (&priv->lock);
+}
+
+GstCaps *
+download_request_get_caps (DownloadRequest * request)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+  GstCaps *caps;
+
+  g_return_val_if_fail (request != NULL, NULL);
+
+  if (request->state != DOWNLOAD_REQUEST_STATE_LOADING
+      && request->state != DOWNLOAD_REQUEST_STATE_COMPLETE)
+    return NULL;
+
+  g_rec_mutex_lock (&priv->lock);
+  if (priv->caps == NULL) {
+    guint64 offset, offset_end;
+
+    /* FIXME: This is currently necessary as typefinding only
+     * works with 0 offsets... need to find a better way to
+     * do that */
+    offset = GST_BUFFER_OFFSET (priv->buffer);
+    offset_end = GST_BUFFER_OFFSET_END (priv->buffer);
+    GST_BUFFER_OFFSET (priv->buffer) = GST_BUFFER_OFFSET_NONE;
+    GST_BUFFER_OFFSET_END (priv->buffer) = GST_BUFFER_OFFSET_NONE;
+    priv->caps = gst_type_find_helper_for_buffer (NULL, priv->buffer, NULL);
+    GST_BUFFER_OFFSET (priv->buffer) = offset;
+    GST_BUFFER_OFFSET_END (priv->buffer) = offset_end;
+  }
+
+  caps = gst_caps_ref (priv->caps);
+  g_rec_mutex_unlock (&priv->lock);
+
+  return caps;
+}
+
+gboolean
+download_request_add_buffer (DownloadRequest * request, GstBuffer * buffer)
+{
+  DownloadRequestPrivate *priv = DOWNLOAD_REQUEST_PRIVATE (request);
+
+  g_return_val_if_fail (request != NULL, FALSE);
+  g_return_val_if_fail (buffer != NULL, FALSE);
+
+  if (request->state == DOWNLOAD_REQUEST_STATE_COMPLETE) {
+    GST_WARNING ("Download request is completed, could not add more buffers");
+    gst_buffer_unref (buffer);
+    return FALSE;
+  }
+
+  GST_DEBUG ("Adding new buffer %" GST_PTR_FORMAT " to the request data",
+      buffer);
+
+  request->content_received += gst_buffer_get_size (buffer);
+
+  /* We steal the buffers you pass in */
+  if (priv->buffer == NULL)
+    priv->buffer = buffer;
+  else
+    priv->buffer = gst_buffer_append (priv->buffer, buffer);
+
+  return TRUE;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/downloadrequest.h
new file mode 100644 (file)
index 0000000..23627ca
--- /dev/null
@@ -0,0 +1,110 @@
+/* GStreamer
+ * Copyright (C) 2011 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * downloadrequest.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __DOWNLOAD_REQUEST_H__
+#define __DOWNLOAD_REQUEST_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define DOWNLOAD_REQUEST(obj) ((DownloadRequest *)(obj))
+
+typedef struct _DownloadRequest DownloadRequest;
+typedef enum _DownloadRequestState DownloadRequestState;
+
+typedef void (*DownloadRequestEventCallback) (DownloadRequest *request, DownloadRequestState state, void *cb_data);
+
+enum _DownloadRequestState {
+  DOWNLOAD_REQUEST_STATE_UNSENT,
+  DOWNLOAD_REQUEST_STATE_OPEN,     /* Request sent, but no response yet */
+  DOWNLOAD_REQUEST_STATE_HEADERS_RECEIVED, /* Response headers received, awaiting body */
+  DOWNLOAD_REQUEST_STATE_LOADING,  /* Content loading in progress */
+  DOWNLOAD_REQUEST_STATE_COMPLETE, /* Request processing finished successfully - check status_code for completion 200-399 codes */
+  DOWNLOAD_REQUEST_STATE_ERROR,    /* Request generated an http error - check status_code */
+};
+
+struct _DownloadRequest
+{
+  gint ref_count;
+
+  gboolean in_use; /* TRUE if this request is being serviced */
+  gboolean send_progress; /* TRUE if this request wants progress events */
+
+  DownloadRequestState state;
+  guint status_code; /* HTTP status code */
+
+  /* Request parameters */
+  gchar * uri;                  /* URI of the request */
+  gint64 range_start;
+  gint64 range_end;
+
+  /* possibly populated during a download */
+  gchar * redirect_uri;         /* Redirect target if any */
+  gboolean redirect_permanent;  /* If the redirect is permanent */
+
+  GstStructure *headers;        /* HTTP request/response headers */
+  guint64 content_length;       /* Response content length, if known (or 0) */
+  guint64 content_received;     /* Response content received so far */
+
+  guint64 download_request_time;  /* Epoch time when the request was submitted */
+  guint64 download_start_time;    /* Epoch time when the first data for the download arrived */
+  guint64 download_end_time;      /* Epoch time when the download finished */
+};
+
+void download_request_set_uri (DownloadRequest *request, const gchar *uri,
+    gint64 range_start, gint64 range_end);
+
+/* Reset the request state back to UNSENT and clear any stored info. The request must not be in use */
+void download_request_reset (DownloadRequest *request);
+
+void download_request_begin_download (DownloadRequest *request);
+
+void download_request_set_caps (DownloadRequest * request, GstCaps * caps);
+
+GstCaps * download_request_get_caps (DownloadRequest * request);
+
+gboolean download_request_add_buffer (DownloadRequest *request, GstBuffer *buffer);
+GstBuffer * download_request_take_buffer (DownloadRequest *request);
+
+DownloadRequest * download_request_new (void);
+DownloadRequest * download_request_new_uri (const gchar * uri);
+DownloadRequest * download_request_new_uri_range (const gchar * uri, gint64 range_start, gint64 range_end);
+
+void download_request_set_callbacks (DownloadRequest *request,
+    DownloadRequestEventCallback on_completion,
+    DownloadRequestEventCallback on_error,
+    DownloadRequestEventCallback on_cancellation,
+    DownloadRequestEventCallback on_progress,
+    void *cb_data);
+
+DownloadRequest *download_request_ref (DownloadRequest *request);
+void download_request_unref (DownloadRequest *request);
+
+void download_request_lock (DownloadRequest *request);
+void download_request_unlock (DownloadRequest *request);
+
+void download_request_despatch_progresss (DownloadRequest *request);
+void download_request_despatch_completion (DownloadRequest *request);
+
+G_END_DECLS
+#endif /* __DOWNLOAD_REQUEST_H__ */
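
For contrast with the blocking helper calls, a hedged sketch of the
callback-driven pattern these declarations support (the `on_fragment_done`,
`dh` and `cb_data` names are illustrative, not part of the patch):

    static void
    on_fragment_done (DownloadRequest * req, DownloadRequestState state,
        void *cb_data)
    {
      /* Invoked once the request completes: take ownership of the data */
      GstBuffer *buf = download_request_take_buffer (req);
      if (buf)
        gst_buffer_unref (buf);   /* ... or hand it on for parsing ... */
    }

    GError *error = NULL;
    DownloadRequest *req =
        download_request_new_uri_range ("https://example.com/segment1.ts", 0, -1);

    download_request_set_callbacks (req, on_fragment_done,
        NULL /* error */, NULL /* cancellation */, NULL /* progress */, cb_data);

    if (!downloadhelper_submit_request (dh, NULL, DOWNLOAD_FLAG_NONE, req, &error))
      g_clear_error (&error);

    /* keep req alive until the completion callback has fired, then unref it */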
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-period.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-period.c
new file mode 100644 (file)
index 0000000..eab38fb
--- /dev/null
@@ -0,0 +1,286 @@
+/* GStreamer
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstadaptivedemux.h"
+#include "gstadaptivedemux-private.h"
+
+GST_DEBUG_CATEGORY_EXTERN (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+GstAdaptiveDemuxPeriod *
+gst_adaptive_demux_period_new (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxPeriod *period;
+
+  period = g_new0 (GstAdaptiveDemuxPeriod, 1);
+  g_atomic_int_set (&period->ref_count, 1);
+
+  period->demux = demux;
+  period->period_num = demux->priv->n_periods++;
+
+  g_queue_push_tail (demux->priv->periods, period);
+
+  return period;
+}
+
+static void
+_demux_period_free (GstAdaptiveDemuxPeriod * period)
+{
+  /* Disable and remove all streams and tracks. */
+  g_list_free_full (period->streams, (GDestroyNotify) gst_object_unref);
+
+  /* Theoretically all tracks should have gone by now */
+  GST_DEBUG ("Disabling and removing all tracks");
+  g_list_free_full (period->tracks,
+      (GDestroyNotify) gst_adaptive_demux_track_unref);
+
+  g_free (period);
+}
+
+GstAdaptiveDemuxPeriod *
+gst_adaptive_demux_period_ref (GstAdaptiveDemuxPeriod * period)
+{
+  g_return_val_if_fail (period != NULL, NULL);
+
+  GST_TRACE ("%p %d -> %d", period, period->ref_count, period->ref_count + 1);
+
+  g_atomic_int_inc (&period->ref_count);
+
+  return period;
+}
+
+void
+gst_adaptive_demux_period_unref (GstAdaptiveDemuxPeriod * period)
+{
+  g_return_if_fail (period != NULL);
+
+  GST_TRACE ("%p %d -> %d", period, period->ref_count, period->ref_count - 1);
+
+  if (g_atomic_int_dec_and_test (&period->ref_count)) {
+    _demux_period_free (period);
+  }
+}
+
+static GstAdaptiveDemuxTrack *
+default_track_for_stream_type_locked (GstAdaptiveDemuxPeriod * period,
+    GstStreamType stream_type)
+{
+  GList *tmp;
+  GstAdaptiveDemuxTrack *res = NULL, *select = NULL;
+
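+  /* Preference order: a track that is already selected wins outright,
+   * otherwise the first track carrying GST_STREAM_FLAG_SELECT, otherwise
+   * the first track of the requested type */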
+  for (tmp = period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *cand = tmp->data;
+
+    if (cand->type == stream_type) {
+      /* If selected, we're done */
+      if (cand->selected)
+        return cand;
+      if (!select && cand->flags & GST_STREAM_FLAG_SELECT)
+        res = select = cand;
+      if (res == NULL)
+        res = cand;
+    }
+  }
+
+  return res;
+}
+
+/* called with TRACKS_LOCK taken */
+void
+gst_adaptive_demux_period_select_default_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxPeriod * period)
+{
+  GstAdaptiveDemuxTrack *track;
+  gboolean changed = FALSE;
+
+  GST_DEBUG_OBJECT (demux, "Picking a default selection");
+
+  /* Do initial selection (pick default for each type) */
+  if ((track =
+          default_track_for_stream_type_locked (period,
+              GST_STREAM_TYPE_VIDEO))) {
+    GST_DEBUG_OBJECT (demux, "Selecting default video track %s",
+        track->stream_id);
+    if (!track->selected) {
+      changed = TRUE;
+      track->selected = TRUE;
+      gst_pad_set_active (track->sinkpad, TRUE);
+    }
+  }
+
+  if ((track =
+          default_track_for_stream_type_locked (period,
+              GST_STREAM_TYPE_AUDIO))) {
+    GST_DEBUG_OBJECT (demux, "Selecting default audio track %s",
+        track->stream_id);
+    if (!track->selected) {
+      changed = TRUE;
+      track->selected = TRUE;
+      gst_pad_set_active (track->sinkpad, TRUE);
+    }
+  }
+
+  if ((track =
+          default_track_for_stream_type_locked (period,
+              GST_STREAM_TYPE_TEXT))) {
+    GST_DEBUG_OBJECT (demux, "Selecting default text track %s",
+        track->stream_id);
+    if (!track->selected) {
+      changed = TRUE;
+      track->selected = TRUE;
+      gst_pad_set_active (track->sinkpad, TRUE);
+    }
+  }
+
+  if (changed)
+    g_atomic_int_set (&demux->priv->requested_selection_seqnum,
+        gst_util_seqnum_next ());
+}
+
+static GstAdaptiveDemuxTrack *
+gst_adaptive_demux_period_find_matching_track (GstAdaptiveDemuxPeriod * period,
+    GstAdaptiveDemuxTrack * track)
+{
+  GList *iter;
+
+  for (iter = period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *cand = iter->data;
+
+    if (!cand->selected && cand->type == track->type) {
+      /* FIXME : Improve this a *lot* */
+      if (!g_strcmp0 (cand->stream_id, track->stream_id))
+        return cand;
+    }
+  }
+
+  return NULL;
+}
+
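+/* Carry the current track selection over into the next period by selecting,
+ * for each selected track, a matching (same type and stream_id) track in the
+ * new period */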
+void
+gst_adaptive_demux_period_transfer_selection (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxPeriod * next_period,
+    GstAdaptiveDemuxPeriod * current_period)
+{
+  GList *iter;
+
+  for (iter = current_period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = iter->data;
+    if (track->selected) {
+      GstAdaptiveDemuxTrack *new_track =
+          gst_adaptive_demux_period_find_matching_track (next_period, track);
+      if (new_track) {
+        GST_DEBUG_OBJECT (demux, "Selecting replacement track %s",
+            new_track->stream_id);
+        new_track->selected = TRUE;
+        gst_pad_set_active (new_track->sinkpad, TRUE);
+      } else {
+        GST_WARNING_OBJECT (demux, "Could not find replacement track for %s",
+            track->stream_id);
+        /* FIXME : Pick a default for that type ? Just continue as-is ? */
+      }
+    }
+  }
+}
+
+/* called with TRACKS_LOCK taken */
+gboolean
+gst_adaptive_demux_period_add_track (GstAdaptiveDemuxPeriod * period,
+    GstAdaptiveDemuxTrack * track)
+{
+  GST_LOG ("period %d track:%p", period->period_num, track);
+
+  /* Actually create and add the elements to the demuxer */
+  if (!gst_adaptive_demux_track_add_elements (track, period->period_num)) {
+    GST_ERROR ("Failed to add track");
+    return FALSE;
+  }
+
+  period->tracks =
+      g_list_append (period->tracks, gst_adaptive_demux_track_ref (track));
+  period->tracks_changed = TRUE;
+
+  return TRUE;
+}
+
+/* must be called with manifest_lock taken */
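+/* Combine the last flow returns of all streams in the period: fatal returns
+ * (FLUSHING, or NOT_NEGOTIATED and worse) propagate immediately, NOT_LINKED
+ * is returned only if every stream is not-linked, EOS only if all linked
+ * streams are EOS, otherwise OK */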
+GstFlowReturn
+gst_adaptive_demux_period_combine_stream_flows (GstAdaptiveDemuxPeriod * period)
+{
+  gboolean all_notlinked = TRUE;
+  gboolean all_eos = TRUE;
+  GList *iter;
+
+  for (iter = period->streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+
+    if (stream->last_ret != GST_FLOW_NOT_LINKED) {
+      all_notlinked = FALSE;
+      if (stream->last_ret != GST_FLOW_EOS)
+        all_eos = FALSE;
+    }
+
+    if (stream->last_ret <= GST_FLOW_NOT_NEGOTIATED
+        || stream->last_ret == GST_FLOW_FLUSHING) {
+      return stream->last_ret;
+    }
+  }
+
+  if (all_notlinked)
+    return GST_FLOW_NOT_LINKED;
+
+  if (all_eos)
+    return GST_FLOW_EOS;
+
+  return GST_FLOW_OK;
+}
+
+void
+gst_adaptive_demux_period_stop_tasks (GstAdaptiveDemuxPeriod * period)
+{
+  GList *iter;
+
+  for (iter = period->streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+
+    gst_adaptive_demux2_stream_stop (stream);
+
+    stream->download_error_count = 0;
+    stream->need_header = TRUE;
+  }
+}
+
+gboolean
+gst_adaptive_demux_period_has_pending_tracks (GstAdaptiveDemuxPeriod * period)
+{
+  GList *iter;
+
+  for (iter = period->streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+    if (stream->pending_tracks)
+      return TRUE;
+  }
+  return FALSE;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-private.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-private.h
new file mode 100644 (file)
index 0000000..cafe418
--- /dev/null
@@ -0,0 +1,238 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _GST_ADAPTIVE_DEMUX_PRIVATE_H_
+#define _GST_ADAPTIVE_DEMUX_PRIVATE_H_
+
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstflowcombiner.h>
+
+#define NUM_LOOKBACK_FRAGMENTS 3
+#define MAX_DOWNLOAD_ERROR_COUNT 3
+
+/* Internal, so not using GST_FLOW_CUSTOM_SUCCESS_N */
+#define GST_ADAPTIVE_DEMUX_FLOW_SWITCH (GST_FLOW_CUSTOM_SUCCESS_2 + 1)
+
+#define TRACKS_GET_LOCK(d) (&GST_ADAPTIVE_DEMUX_CAST(d)->priv->tracks_lock)
+#define TRACKS_LOCK(d) g_mutex_lock (TRACKS_GET_LOCK (d))
+#define TRACKS_UNLOCK(d) g_mutex_unlock (TRACKS_GET_LOCK (d))
+
+#define BUFFERING_GET_LOCK(d) (&GST_ADAPTIVE_DEMUX_CAST(d)->priv->buffering_lock)
+#define BUFFERING_LOCK(d) g_mutex_lock (BUFFERING_GET_LOCK (d))
+#define BUFFERING_UNLOCK(d) g_mutex_unlock (BUFFERING_GET_LOCK (d))
+
+#define GST_MANIFEST_GET_LOCK(d) (&(GST_ADAPTIVE_DEMUX_CAST(d)->priv->manifest_lock))
+#define GST_MANIFEST_LOCK(d) G_STMT_START { \
+    GST_TRACE("Locking manifest from thread %p", g_thread_self()); \
+    g_rec_mutex_lock (GST_MANIFEST_GET_LOCK (d)); \
+    GST_TRACE("Locked manifest from thread %p", g_thread_self()); \
+ } G_STMT_END
+
+#define GST_MANIFEST_UNLOCK(d) G_STMT_START { \
+    GST_TRACE("Unlocking manifest from thread %p", g_thread_self()); \
+    g_rec_mutex_unlock (GST_MANIFEST_GET_LOCK (d)); \
+ } G_STMT_END
+
+#define GST_ADAPTIVE_DEMUX_GET_SCHEDULER(d) (GST_ADAPTIVE_DEMUX_CAST(d)->priv->scheduler_task)
+
+#define GST_ADAPTIVE_SCHEDULER_LOCK(d) gst_adaptive_demux_scheduler_lock(d)
+#define GST_ADAPTIVE_SCHEDULER_UNLOCK(d) G_STMT_START { \
+    GST_TRACE("Unlocking scheduler from thread %p", g_thread_self()); \
+    gst_adaptive_demux_loop_unlock_and_unpause (GST_ADAPTIVE_DEMUX_GET_SCHEDULER (d)); \
+ } G_STMT_END
+
+#define GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK(d) (&GST_ADAPTIVE_DEMUX_CAST(d)->priv->segment_lock)
+#define GST_ADAPTIVE_DEMUX_SEGMENT_LOCK(d) g_mutex_lock (GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK (d))
+#define GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK(d) g_mutex_unlock (GST_ADAPTIVE_DEMUX_SEGMENT_GET_LOCK (d))
+
+struct _GstAdaptiveDemuxPrivate
+{
+  GstAdapter *input_adapter;    /* protected by manifest_lock */
+  gint have_manifest;           /* MT safe */
+
+  /* Adaptive scheduling and parsing task */
+  GstAdaptiveDemuxLoop *scheduler_task;
+  GMutex scheduler_lock;
+
+  /* Callback / timer id for the next manifest update */
+  guint manifest_updates_cb;
+
+  /* Count of failed manifest updates */
+  gint update_failed_count;
+
+  guint32 segment_seqnum;       /* protected by manifest_lock */
+
+  /* main lock used to protect adaptive demux and all its streams.
+   * It serializes the adaptive demux public API.
+   */
+  GRecMutex manifest_lock;
+
+  /* Duration, updated after manifest updates */
+  GstClockTime duration;
+
+  /* Set to TRUE if any stream is waiting on the manifest update */
+  gboolean stream_waiting_for_manifest;
+
+  GMutex api_lock;
+
+  /* Protects demux and stream segment information
+   * Needed because seeks can update segment information
+   * without needing to stop tasks when they just want to
+   * update the segment boundaries */
+  GMutex segment_lock;
+
+  GstClockTime qos_earliest_time;
+
+  /* Protects all tracks and period content */
+  GMutex tracks_lock;
+  /* Used to notify addition to a waiting (i.e. previously empty) track */
+  GCond tracks_add;
+  /* TRUE if we are buffering */
+  gboolean is_buffering;
+  /* TRUE if percent changed and message should be posted */
+  gboolean percent_changed;
+  gint percent;
+
+  /* Serialises buffering message posting to avoid out-of-order
+   * posting */
+  GMutex buffering_lock;
+
+  /* Atomic */
+  guint32 requested_selection_seqnum;
+
+  /* Lock protecting all the following fields */
+  GRecMutex output_lock;
+  /* Output task */
+  GstTask *output_task;
+  /* List of enabled OutputSlot */
+  GList *outputs;
+  /* flow combiner of output slots */
+  GstFlowCombiner *flowcombiner;
+  /* protected by output_lock */
+  gboolean flushing;
+  /* Current output selection seqnum */
+  guint32 current_selection_seqnum;
+  /* Current output position (in running time) */
+  GstClockTimeDiff global_output_position;
+  /* End of fields protected by output_lock */
+
+  gint n_audio_streams, n_video_streams, n_subtitle_streams;
+
+  /* Counter used for uniquely identifying periods */
+  gint n_periods;
+
+  /* Array of periods.
+   *
+   * Head is the period being outputted, or to be outputted first
+   * Tail is where new streams get added */
+  GQueue *periods;
+};
+
+static inline gboolean gst_adaptive_demux_scheduler_lock(GstAdaptiveDemux *d)
+{
+    GST_TRACE("Locking scheduler from thread %p", g_thread_self());
+    if (!gst_adaptive_demux_loop_pause_and_lock (GST_ADAPTIVE_DEMUX_GET_SCHEDULER (d)))
+      return FALSE;
+
+    GST_TRACE("Locked scheduler from thread %p", g_thread_self());
+    return TRUE;
+}
+
+void demux_update_buffering_locked (GstAdaptiveDemux * demux);
+void demux_post_buffering_locked (GstAdaptiveDemux * demux);
+
+GstFlowReturn gst_adaptive_demux_update_manifest (GstAdaptiveDemux *demux);
+
+void gst_adaptive_demux2_stream_wants_manifest_update (GstAdaptiveDemux * demux);
+
+void gst_adaptive_demux2_stream_parse_error (GstAdaptiveDemux2Stream *stream, GError * err);
+GstClockTime gst_adaptive_demux2_stream_get_fragment_waiting_time (GstAdaptiveDemux *
+    demux, GstAdaptiveDemux2Stream * stream);
+GstClockTime gst_adaptive_demux2_stream_get_presentation_offset (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+GstClockTime gst_adaptive_demux_get_period_start_time (GstAdaptiveDemux * demux);
+
+gboolean gst_adaptive_demux_is_live (GstAdaptiveDemux * demux);
+
+void gst_adaptive_demux2_stream_on_manifest_update (GstAdaptiveDemux2Stream * stream);
+void gst_adaptive_demux2_stream_on_output_space_available (GstAdaptiveDemux2Stream *stream);
+
+gboolean gst_adaptive_demux2_stream_has_next_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+GstFlowReturn gst_adaptive_demux2_stream_update_fragment_info (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+GstFlowReturn gst_adaptive_demux2_stream_seek (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, gboolean forward, GstSeekFlags flags,
+    GstClockTimeDiff ts, GstClockTimeDiff * final_ts);
+gboolean gst_adaptive_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+    gint64 * range_start, gint64 * range_stop);
+gboolean gst_adaptive_demux2_stream_in_live_seek_range (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+gboolean gst_adaptive_demux2_stream_is_selected_locked (GstAdaptiveDemux2Stream *stream);
+
+gboolean gst_adaptive_demux_has_next_period (GstAdaptiveDemux * demux);
+void gst_adaptive_demux_advance_period (GstAdaptiveDemux * demux);
+
+void gst_adaptive_demux2_stream_stop (GstAdaptiveDemux2Stream * stream);
+
+typedef struct
+{
+  GstMiniObject *item;
+  gsize size;
+  /* running time of item : GST_CLOCK_STIME_NONE for non-timed data */
+  GstClockTimeDiff runningtime;
+  /* end running time of item : GST_CLOCK_STIME_NONE for non-timed data */
+  GstClockTimeDiff runningtime_end;
+} TrackQueueItem;
+
+GstAdaptiveDemux2Stream *find_stream_for_track_locked (GstAdaptiveDemux *
+    demux, GstAdaptiveDemuxTrack * track);
+
+GstMiniObject * track_dequeue_data_locked (GstAdaptiveDemux * demux, GstAdaptiveDemuxTrack * track, gboolean check_sticky_events);
+void gst_adaptive_demux_track_flush (GstAdaptiveDemuxTrack * track);
+void gst_adaptive_demux_track_drain_to (GstAdaptiveDemuxTrack * track, GstClockTime drain_running_time);
+void gst_adaptive_demux_track_update_next_position (GstAdaptiveDemuxTrack * track);
+
+/* Period functions */
+GstAdaptiveDemuxPeriod * gst_adaptive_demux_period_new (GstAdaptiveDemux * demux);
+
+GstAdaptiveDemuxPeriod * gst_adaptive_demux_period_ref (GstAdaptiveDemuxPeriod * period);
+void                     gst_adaptive_demux_period_unref (GstAdaptiveDemuxPeriod * period);
+
+gboolean                 gst_adaptive_demux_period_add_track (GstAdaptiveDemuxPeriod * period,
+                                                             GstAdaptiveDemuxTrack * track);
+gboolean                 gst_adaptive_demux_track_add_elements (GstAdaptiveDemuxTrack * track,
+                                                               guint period_num);
+
+void                     gst_adaptive_demux_period_select_default_tracks (GstAdaptiveDemux * demux,
+                                                                         GstAdaptiveDemuxPeriod * period);
+void                     gst_adaptive_demux_period_transfer_selection (GstAdaptiveDemux * demux,
+                                                                      GstAdaptiveDemuxPeriod * next_period,
+                                                                      GstAdaptiveDemuxPeriod * current_period);
+void                     gst_adaptive_demux_period_stop_tasks (GstAdaptiveDemuxPeriod * period);
+GstFlowReturn            gst_adaptive_demux_period_combine_stream_flows (GstAdaptiveDemuxPeriod * period);
+
+gboolean                 gst_adaptive_demux_period_has_pending_tracks (GstAdaptiveDemuxPeriod * period);
+
+#endif
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-stream.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-stream.c
new file mode 100644 (file)
index 0000000..8ef5056
--- /dev/null
@@ -0,0 +1,1977 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstadaptivedemux.h"
+#include "gstadaptivedemux-private.h"
+
+#include "gst/gst-i18n-plugin.h"
+#include <gst/app/gstappsrc.h>
+
+GST_DEBUG_CATEGORY_EXTERN (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+static void gst_adaptive_demux2_stream_finalize (GObject * object);
+static void gst_adaptive_demux2_stream_error (GstAdaptiveDemux2Stream * stream);
+
+#define gst_adaptive_demux2_stream_parent_class parent_class
+G_DEFINE_ABSTRACT_TYPE (GstAdaptiveDemux2Stream, gst_adaptive_demux2_stream,
+    GST_TYPE_OBJECT);
+
+static void
+gst_adaptive_demux2_stream_class_init (GstAdaptiveDemux2StreamClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->finalize = gst_adaptive_demux2_stream_finalize;
+}
+
+static GType tsdemux_type = 0;
+
+static void
+gst_adaptive_demux2_stream_init (GstAdaptiveDemux2Stream * stream)
+{
+  stream->download_request = download_request_new ();
+  stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED;
+  stream->last_ret = GST_FLOW_OK;
+
+  stream->fragment_bitrates =
+      g_malloc0 (sizeof (guint64) * NUM_LOOKBACK_FRAGMENTS);
+
+  gst_segment_init (&stream->parse_segment, GST_FORMAT_TIME);
+}
+
+/* GObject finalize handler: releases the resources held by the stream
+ * (download request, fragment info, pending events/caps/tags, parsebin
+ * references and track references). */
+static void
+gst_adaptive_demux2_stream_finalize (GObject * object)
+{
+  GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) object;
+
+  GST_LOG_OBJECT (object, "Finalizing");
+
+  if (stream->download_request)
+    download_request_unref (stream->download_request);
+
+  stream->cancelled = TRUE;
+  g_clear_error (&stream->last_error);
+
+  gst_adaptive_demux2_stream_fragment_clear (&stream->fragment);
+
+  if (stream->pending_events) {
+    g_list_free_full (stream->pending_events, (GDestroyNotify) gst_event_unref);
+    stream->pending_events = NULL;
+  }
+
+  if (stream->parsebin_sink) {
+    gst_object_unref (stream->parsebin_sink);
+    stream->parsebin_sink = NULL;
+  }
+
+  if (stream->pad_added_id)
+    g_signal_handler_disconnect (stream->parsebin, stream->pad_added_id);
+  if (stream->pad_removed_id)
+    g_signal_handler_disconnect (stream->parsebin, stream->pad_removed_id);
+
+  g_free (stream->fragment_bitrates);
+
+  g_list_free_full (stream->tracks,
+      (GDestroyNotify) gst_adaptive_demux_track_unref);
+
+  if (stream->pending_caps)
+    gst_caps_unref (stream->pending_caps);
+
+  g_clear_pointer (&stream->pending_tags, gst_tag_list_unref);
+  g_clear_pointer (&stream->stream_collection, gst_object_unref);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/**
+ * gst_adaptive_demux2_stream_add_track:
+ * @stream: A #GstAdaptiveDemux2Stream
+ * @track: (transfer none): A #GstAdaptiveDemuxTrack to assign to the @stream
+ *
+ * This function is called when a subclass knows of a target @track that this
+ * @stream can provide.
+ *
+ * Returns: %TRUE if the track was added, %FALSE if it was already handled
+ * by this stream.
+ */
+gboolean
+gst_adaptive_demux2_stream_add_track (GstAdaptiveDemux2Stream * stream,
+    GstAdaptiveDemuxTrack * track)
+{
+  g_return_val_if_fail (track != NULL, FALSE);
+
+  GST_DEBUG_OBJECT (stream->demux, "stream:%p track:%s", stream,
+      track->stream_id);
+  if (g_list_find (stream->tracks, track)) {
+    GST_DEBUG_OBJECT (stream->demux,
+        "track '%s' already handled by this stream", track->stream_id);
+    return FALSE;
+  }
+
+  stream->tracks =
+      g_list_append (stream->tracks, gst_adaptive_demux_track_ref (track));
+  if (stream->demux) {
+    g_assert (stream->period);
+    gst_adaptive_demux_period_add_track (stream->period, track);
+  }
+  return TRUE;
+}
+
+static gboolean
+gst_adaptive_demux2_stream_next_download (GstAdaptiveDemux2Stream * stream);
+static gboolean
+gst_adaptive_demux2_stream_load_a_fragment (GstAdaptiveDemux2Stream * stream);
+static void
+gst_adaptive_demux2_stream_handle_playlist_eos (GstAdaptiveDemux2Stream *
+    stream);
+static GstFlowReturn
+gst_adaptive_demux2_stream_begin_download_uri (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, const gchar * uri, gint64 start,
+    gint64 end);
+
+#ifndef GST_DISABLE_GST_DEBUG
+static const char *
+uritype (GstAdaptiveDemux2Stream * s)
+{
+  if (s->downloading_header)
+    return "header";
+  if (s->downloading_index)
+    return "index";
+  return "fragment";
+}
+#endif
+
+/* Schedules another chunked download and returns TRUE, or returns FALSE if
+ * there are no more chunks to download */
+static gboolean
+schedule_another_chunk (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  DownloadRequest *request = stream->download_request;
+  GstFlowReturn ret;
+
+  gchar *uri = request->uri;
+  gint64 range_start = request->range_start;
+  gint64 range_end = request->range_end;
+  gint64 chunk_size;
+  gint64 chunk_end;
+
+  if (range_end == -1)
+    return FALSE;               /* This was a request to the end, no more to load */
+
+  /* The size of the request that just completed: */
+  chunk_size = range_end + 1 - range_start;
+
+  if (request->content_received < chunk_size)
+    return FALSE;               /* Short read - we're done */
+
+  /* Accumulate the data we just fetched, to figure out the next
+   * request start position and update the target chunk size from
+   * the updated stream fragment info */
+  range_start += chunk_size;
+  range_end = stream->fragment.range_end;
+  chunk_size = stream->fragment.chunk_size;
+
+  if (chunk_size == 0)
+    return FALSE;               /* Sub-class doesn't want another chunk */
+
+  /* HTTP ranges are inclusive for the end */
+  if (chunk_size != -1) {
+    chunk_end = range_start + chunk_size - 1;
+    if (range_end != -1 && range_end < chunk_end)
+      chunk_end = range_end;
+  } else {
+    chunk_end = range_end;
+  }
+
+  GST_DEBUG_OBJECT (stream,
+      "Starting next chunk %s %" G_GINT64_FORMAT "-%" G_GINT64_FORMAT
+      " chunk_size %" G_GINT64_FORMAT, uri, range_start, chunk_end, chunk_size);
+
+  ret =
+      gst_adaptive_demux2_stream_begin_download_uri (demux, stream, uri,
+      range_start, chunk_end);
+  if (ret != GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (stream,
+        "Stopping stream due to begin download failure - ret %s",
+        gst_flow_get_name (ret));
+    gst_adaptive_demux2_stream_stop (stream);
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+static void
+drain_inactive_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GList *iter;
+
+  TRACKS_LOCK (demux);
+  for (iter = stream->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+    if (!track->selected) {
+      gst_adaptive_demux_track_drain_to (track,
+          demux->priv->global_output_position);
+    }
+  }
+
+  TRACKS_UNLOCK (demux);
+}
+
+/* Called to complete a download, either due to failure or completion
+ * Should set up the next download if necessary */
+static void
+gst_adaptive_demux2_stream_finish_download (GstAdaptiveDemux2Stream *
+    stream, GstFlowReturn ret, GError * err)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (stream->demux);
+  GstAdaptiveDemux *demux = stream->demux;
+
+  GST_DEBUG_OBJECT (stream,
+      "%s download finish: %d %s - err: %p", uritype (stream), ret,
+      gst_flow_get_name (ret), err);
+
+  stream->download_finished = TRUE;
+
+  /* finish_fragment might call gst_adaptive_demux2_stream_advance_fragment,
+   * which can look at the last_ret - so make sure it's stored before calling that.
+   * Also, for not-linked or other errors passed in that are going to make
+   * this stream stop, we'll need to store it */
+  stream->last_ret = ret;
+
+  if (err) {
+    g_clear_error (&stream->last_error);
+    stream->last_error = g_error_copy (err);
+  }
+
+  /* For actual errors, stop now, no need to call finish_fragment and get
+   * confused if it returns a non-error status, but if EOS was passed in,
+   * continue and check whether finish_fragment() says we've finished
+   * the whole manifest or just this fragment */
+  if (ret < 0 && ret != GST_FLOW_EOS) {
+    GST_INFO_OBJECT (stream,
+        "Stopping stream due to error ret %s", gst_flow_get_name (ret));
+    gst_adaptive_demux2_stream_stop (stream);
+    return;
+  }
+
+  /* Handle all the possible flow returns here: */
+  if (ret == GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT) {
+    /* The sub-class wants to stop the fragment immediately */
+    stream->fragment.finished = TRUE;
+    ret = klass->finish_fragment (demux, stream);
+
+    GST_DEBUG_OBJECT (stream, "finish_fragment ret %d %s", ret,
+        gst_flow_get_name (ret));
+  } else if (ret == GST_ADAPTIVE_DEMUX_FLOW_RESTART_FRAGMENT) {
+    GST_DEBUG_OBJECT (stream, "Restarting download as requested");
+    /* Just mark the fragment as finished */
+    stream->fragment.finished = TRUE;
+    ret = GST_FLOW_OK;
+  } else if (!klass->need_another_chunk || stream->fragment.chunk_size == -1
+      || !klass->need_another_chunk (stream)
+      || stream->fragment.chunk_size == 0) {
+    stream->fragment.finished = TRUE;
+    ret = klass->finish_fragment (stream->demux, stream);
+
+    GST_DEBUG_OBJECT (stream, "finish_fragment ret %d %s", ret,
+        gst_flow_get_name (ret));
+  } else if (stream->fragment.chunk_size != 0
+      && schedule_another_chunk (stream)) {
+    /* Another download has already begun, no need to queue anything below */
+    return;
+  }
+
+  /* For HLS, we might be enqueueing data into tracks that aren't
+   * selected. Drain those ones out */
+  drain_inactive_tracks (stream->demux, stream);
+
+  /* Now that we've called finish_fragment we can clear these flags the
+   * sub-class might have checked */
+  if (stream->downloading_header) {
+    stream->need_header = FALSE;
+    stream->downloading_header = FALSE;
+  } else if (stream->downloading_index) {
+    stream->need_index = FALSE;
+    stream->downloading_index = FALSE;
+    /* Restart the fragment again now that header + index were loaded
+     * so that get_fragment_info() will be called again */
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT;
+  } else {
+    /* Finishing a fragment data download. Try for another */
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT;
+  }
+
+  /* if GST_FLOW_EOS was passed in that means this download is finished,
+   * but it's the result returned from finish_fragment() we really care
+   * about, as that tells us if the manifest has run out of fragments
+   * to load */
+  if (ret == GST_FLOW_EOS) {
+    stream->last_ret = ret;
+
+    gst_adaptive_demux2_stream_handle_playlist_eos (stream);
+    return;
+  }
+
+  /* Now finally, if ret is anything other than success, we should stop this
+   * stream */
+  if (ret < 0) {
+    GST_DEBUG_OBJECT (stream,
+        "Stopping stream due to finish fragment ret %s",
+        gst_flow_get_name (ret));
+    gst_adaptive_demux2_stream_stop (stream);
+    return;
+  }
+
+  /* Clear the last_ret marker before starting a fresh download */
+  stream->last_ret = GST_FLOW_OK;
+
+  GST_LOG_OBJECT (stream, "Scheduling next_download() call");
+  stream->pending_cb_id =
+      gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+      (GSourceFunc) gst_adaptive_demux2_stream_next_download,
+      gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+}
+
+/* Must be called from the scheduler context */
+void
+gst_adaptive_demux2_stream_parse_error (GstAdaptiveDemux2Stream * stream,
+    GError * err)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  if (stream->state != GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING)
+    return;
+
+  downloadhelper_cancel_request (demux->download_helper,
+      stream->download_request);
+
+  /* cancellation is async, so recycle our download request to avoid races */
+  download_request_unref (stream->download_request);
+  stream->download_request = download_request_new ();
+
+  gst_adaptive_demux2_stream_finish_download (stream, GST_FLOW_CUSTOM_ERROR,
+      err);
+}
+
+static void
+gst_adaptive_demux2_stream_prepare_segment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, gboolean first_and_live)
+{
+  GstClockTime period_start = gst_adaptive_demux_get_period_start_time (demux);
+  GstClockTime offset =
+      gst_adaptive_demux2_stream_get_presentation_offset (demux, stream);
+
+  stream->parse_segment = demux->segment;
+
+  /* The demuxer segment is just built from seek events, but for each stream
+   * we have to adjust segments according to the current period and the
+   * stream specific presentation time offset.
+   *
+   * For each period, buffer timestamps start again from 0. Additionally the
+   * buffer timestamps are shifted by the stream specific presentation time
+   * offset, so the first buffer timestamp of a period is 0 + presentation
+   * time offset. If the stream contains timestamps itself, this is also
+   * supposed to be the presentation time stored inside the stream.
+   *
+   * The stream time over periods is supposed to be continuous, that is the
+   * buffer timestamp 0 + presentation time offset should map to the start
+   * time of the current period.
+   *
+   *
+   * The adjustment of the stream segments works as follows.
+   *
+   * If the demuxer segment start is bigger than the period start, this
+   * means that we have to drop some media at the beginning of the current
+   * period, e.g. because a seek into the middle of the period has
+   * happened. The amount of media to drop is the difference between the
+   * period start and the demuxer segment start, and as each period starts
+   * again from 0, this difference is going to be the actual stream's
+   * segment start. As all timestamps of the stream are shifted by the
+   * presentation time offset, we will also have to move the segment start
+   * by that offset.
+   *
+   * Likewise, the demuxer segment stop value is adjusted in the same
+   * fashion.
+   *
+   * Now the running time and stream time at the stream's segment start has
+   * to be the one that is stored inside the demuxer's segment, which means
+   * that segment.base and segment.time have to be copied over (done just
+   * above)
+   *
+   *
+   * If the demuxer segment start is smaller than the period start time,
+   * this means that the whole period is inside the segment. As each period
+   * starts timestamps from 0, and additionally timestamps are shifted by
+   * the presentation time offset, the stream's first timestamp (and as such
+   * the stream's segment start) has to be the presentation time offset.
+   * The stream time at the segment start is supposed to be the stream time
+   * of the period start according to the demuxer segment, so the stream
+   * segment's time would be set to that. The same goes for the stream
+   * segment's base, which is supposed to be the running time of the period
+   * start according to the demuxer's segment.
+   *
+   * The same logic applies for negative rates with the segment stop and
+   * the period stop time (which gets clamped).
+   *
+   *
+   * For the first case, where the complete period is not inside the segment,
+   * the segment time and base calculated as in the second case would be
+   * equivalent.
+   */
+  GST_DEBUG_OBJECT (demux, "Using demux segment %" GST_SEGMENT_FORMAT,
+      &demux->segment);
+  GST_DEBUG_OBJECT (demux,
+      "period_start: %" GST_TIME_FORMAT " offset: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (period_start), GST_TIME_ARGS (offset));
+  /* note for readers:
+   * Since stream->parse_segment is initially a copy of demux->segment,
+   * only the values that need updating are modified below. */
+  if (first_and_live) {
+    /* If first and live, demuxer did seek to the current position already */
+    stream->parse_segment.start = demux->segment.start - period_start + offset;
+    if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+      stream->parse_segment.stop = demux->segment.stop - period_start + offset;
+    /* FIXME : Do we need to handle negative rates for this ? */
+    stream->parse_segment.position = stream->parse_segment.start;
+  } else if (demux->segment.start > period_start) {
+    /* seek within a period */
+    stream->parse_segment.start = demux->segment.start - period_start + offset;
+    if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+      stream->parse_segment.stop = demux->segment.stop - period_start + offset;
+    if (stream->parse_segment.rate >= 0)
+      stream->parse_segment.position = offset;
+    else
+      stream->parse_segment.position = stream->parse_segment.stop;
+  } else {
+    stream->parse_segment.start = offset;
+    if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+      stream->parse_segment.stop = demux->segment.stop - period_start + offset;
+    if (stream->parse_segment.rate >= 0) {
+      stream->parse_segment.position = offset;
+      stream->parse_segment.base =
+          gst_segment_to_running_time (&demux->segment, GST_FORMAT_TIME,
+          period_start);
+    } else {
+      stream->parse_segment.position = stream->parse_segment.stop;
+      stream->parse_segment.base =
+          gst_segment_to_running_time (&demux->segment, GST_FORMAT_TIME,
+          period_start + demux->segment.stop - demux->segment.start);
+    }
+    stream->parse_segment.time =
+        gst_segment_to_stream_time (&demux->segment, GST_FORMAT_TIME,
+        period_start);
+  }
+
+  stream->send_segment = TRUE;
+
+  GST_DEBUG_OBJECT (stream, "Prepared segment %" GST_SEGMENT_FORMAT,
+      &stream->parse_segment);
+}
+
+/* Must be called with the segment lock held */
+static void
+update_buffer_pts_and_demux_position_locked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer)
+{
+  GstClockTimeDiff pos;
+
+  GST_DEBUG_OBJECT (stream, "stream->fragment.stream_time %" GST_STIME_FORMAT,
+      GST_STIME_ARGS (stream->fragment.stream_time));
+
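+  /* The buffer PTS is the fragment's stream time shifted by the stream's
+   * presentation time offset (and clamped to 0 if that would be negative) */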
+  pos = stream->fragment.stream_time;
+
+  if (GST_CLOCK_STIME_IS_VALID (pos)) {
+    GstClockTime offset =
+        gst_adaptive_demux2_stream_get_presentation_offset (demux, stream);
+
+    pos += offset;
+
+    if (pos < 0) {
+      GST_WARNING_OBJECT (stream, "Clamping segment and buffer position to 0");
+      pos = 0;
+    }
+
+    GST_BUFFER_PTS (buffer) = pos;
+  } else {
+    GST_BUFFER_PTS (buffer) = GST_CLOCK_TIME_NONE;
+  }
+
+  GST_DEBUG_OBJECT (stream, "Buffer/stream position is now: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
+}
+
+/* Must be called from the scheduler context */
+GstFlowReturn
+gst_adaptive_demux2_stream_push_buffer (GstAdaptiveDemux2Stream * stream,
+    GstBuffer * buffer)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstFlowReturn ret = GST_FLOW_OK;
+  gboolean discont = FALSE;
+  /* Pending events */
+  GstEvent *pending_caps = NULL, *pending_segment = NULL, *pending_tags =
+      NULL, *stream_start = NULL, *buffer_gap = NULL;
+  GList *pending_events = NULL;
+
+  if (stream->compute_segment) {
+    gst_adaptive_demux2_stream_prepare_segment (demux, stream,
+        stream->first_and_live);
+    stream->compute_segment = FALSE;
+    stream->first_and_live = FALSE;
+  }
+
+  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DROPPABLE)) {
+    GST_DEBUG_OBJECT (stream, "Creating gap event for droppable buffer");
+    buffer_gap =
+        gst_event_new_gap (GST_BUFFER_PTS (buffer),
+        GST_BUFFER_DURATION (buffer));
+  }
+
+  if (stream->first_fragment_buffer) {
+    GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+    if (demux->segment.rate < 0)
+      /* Set the DISCONT flag on the first buffer of every fragment in
+       * reverse playback mode, as each fragment has to be reversed on its own */
+      discont = TRUE;
+    update_buffer_pts_and_demux_position_locked (demux, stream, buffer);
+    GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+    GST_LOG_OBJECT (stream, "Handling initial buffer %" GST_PTR_FORMAT, buffer);
+
+    /* Do we need to inject STREAM_START and SEGMENT events ?
+     *
+     * This can happen when a stream is restarted, and also when switching to a
+     * variant which needs a header (in which case downloading_header will be
+     * TRUE)
+     */
+    if (G_UNLIKELY (stream->send_segment || stream->downloading_header)) {
+      GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+      pending_segment = gst_event_new_segment (&stream->parse_segment);
+      gst_event_set_seqnum (pending_segment, demux->priv->segment_seqnum);
+      stream->send_segment = FALSE;
+      GST_DEBUG_OBJECT (stream, "Sending %" GST_PTR_FORMAT, pending_segment);
+      GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+      stream_start = gst_event_new_stream_start ("bogus");
+      if (demux->have_group_id)
+        gst_event_set_group_id (stream_start, demux->group_id);
+    }
+  } else {
+    GST_BUFFER_PTS (buffer) = GST_CLOCK_TIME_NONE;
+  }
+  stream->first_fragment_buffer = FALSE;
+
+  if (stream->discont) {
+    discont = TRUE;
+    stream->discont = FALSE;
+  }
+
+  if (discont) {
+    GST_DEBUG_OBJECT (stream, "Marking fragment as discontinuous");
+    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+  } else {
+    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
+  }
+
+  GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
+  GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
+  if (G_UNLIKELY (stream->pending_caps)) {
+    pending_caps = gst_event_new_caps (stream->pending_caps);
+    gst_caps_unref (stream->pending_caps);
+    stream->pending_caps = NULL;
+  }
+
+  if (G_UNLIKELY (stream->pending_tags)) {
+    GstTagList *tags = stream->pending_tags;
+
+    stream->pending_tags = NULL;
+
+    if (tags)
+      pending_tags = gst_event_new_tag (tags);
+  }
+  if (G_UNLIKELY (stream->pending_events)) {
+    pending_events = stream->pending_events;
+    stream->pending_events = NULL;
+  }
+
+  /* Do not push events or buffers holding the manifest lock */
+  if (G_UNLIKELY (stream_start)) {
+    GST_DEBUG_OBJECT (stream,
+        "Setting stream start: %" GST_PTR_FORMAT, stream_start);
+    gst_pad_send_event (stream->parsebin_sink, stream_start);
+  }
+  if (G_UNLIKELY (pending_caps)) {
+    GST_DEBUG_OBJECT (stream,
+        "Setting pending caps: %" GST_PTR_FORMAT, pending_caps);
+    gst_pad_send_event (stream->parsebin_sink, pending_caps);
+  }
+  if (G_UNLIKELY (pending_segment)) {
+    GST_DEBUG_OBJECT (stream,
+        "Sending pending seg: %" GST_PTR_FORMAT, pending_segment);
+    gst_pad_send_event (stream->parsebin_sink, pending_segment);
+  }
+  if (G_UNLIKELY (pending_tags)) {
+    GST_DEBUG_OBJECT (stream,
+        "Sending pending tags: %" GST_PTR_FORMAT, pending_tags);
+    gst_pad_send_event (stream->parsebin_sink, pending_tags);
+  }
+  while (pending_events != NULL) {
+    GstEvent *event = pending_events->data;
+
+    GST_DEBUG_OBJECT (stream, "Sending pending event: %" GST_PTR_FORMAT, event);
+    if (!gst_pad_send_event (stream->parsebin_sink, event))
+      GST_ERROR_OBJECT (stream, "Failed to send pending event");
+
+    pending_events = g_list_delete_link (pending_events, pending_events);
+  }
+
+  GST_DEBUG_OBJECT (stream,
+      "About to push buffer of size %" G_GSIZE_FORMAT " offset %"
+      G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
+      GST_BUFFER_OFFSET (buffer));
+
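+  /* Push the buffer into this stream's parsebin; its parsed output pads get
+   * linked to the matching tracks in parsebin_pad_added_cb() */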
+  ret = gst_pad_chain (stream->parsebin_sink, buffer);
+
+  if (buffer_gap) {
+    GST_DEBUG_OBJECT (stream, "Sending %" GST_PTR_FORMAT, buffer_gap);
+    gst_pad_send_event (stream->parsebin_sink, buffer_gap);
+  }
+
+  if (G_UNLIKELY (stream->cancelled)) {
+    GST_LOG_OBJECT (demux, "Stream was cancelled");
+    return GST_FLOW_FLUSHING;
+  }
+
+  GST_LOG_OBJECT (stream, "Push result: %d %s", ret, gst_flow_get_name (ret));
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_adaptive_demux2_stream_parse_buffer (GstAdaptiveDemux2Stream * stream,
+    GstBuffer * buffer)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  /* do not make any changes if the stream is cancelled */
+  if (G_UNLIKELY (stream->cancelled)) {
+    gst_buffer_unref (buffer);
+    return GST_FLOW_FLUSHING;
+  }
+
+  /* starting_fragment is set to TRUE at the beginning of
+   * _stream_download_fragment()
+   * /!\ If there is a header/index being downloaded, then this will
+   * be TRUE for the first one ... but FALSE for the remaining ones,
+   * including the *actual* fragment ! */
+  if (stream->starting_fragment) {
+    stream->starting_fragment = FALSE;
+    if (klass->start_fragment != NULL && !klass->start_fragment (demux, stream))
+      return GST_FLOW_ERROR;
+  }
+
+  stream->download_total_bytes += gst_buffer_get_size (buffer);
+
+  GST_TRACE_OBJECT (stream,
+      "Received %s buffer of size %" G_GSIZE_FORMAT, uritype (stream),
+      gst_buffer_get_size (buffer));
+
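+  /* Hand the data over to the subclass's data_received() handler, which
+   * takes ownership of the buffer */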
+  ret = klass->data_received (demux, stream, buffer);
+
+  if (ret != GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (stream, "data_received returned %s",
+        gst_flow_get_name (ret));
+
+    if (ret == GST_FLOW_FLUSHING) {
+      /* do not make any changes if the stream is cancelled */
+      if (G_UNLIKELY (stream->cancelled)) {
+        return ret;
+      }
+    }
+
+    if (ret < GST_FLOW_EOS) {
+      GstEvent *eos = gst_event_new_eos ();
+      GST_ELEMENT_FLOW_ERROR (demux, ret);
+
+      GST_DEBUG_OBJECT (stream, "Pushing EOS to parser");
+
+      /* TODO push this on all pads */
+      gst_event_set_seqnum (eos, stream->demux->priv->segment_seqnum);
+      gst_pad_send_event (stream->parsebin_sink, eos);
+      ret = GST_FLOW_ERROR;
+
+      stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_ERRORED;
+    }
+  }
+
+  return ret;
+}
+
+/* Calculate the low and high download buffering watermarks
+ * in time as MAX (low-watermark-time, low-watermark-fragments) and
+ * MIN (high-watermark-time, high-watermark-fragments) respectively
+ */
+static void
+calculate_track_thresholds (GstAdaptiveDemux * demux,
+    GstClockTime fragment_duration, GstClockTime * low_threshold,
+    GstClockTime * high_threshold)
+{
+  GST_OBJECT_LOCK (demux);
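+  /* A property value of 0 means that threshold is unset / not limiting */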
+  *low_threshold = demux->buffering_low_watermark_fragments * fragment_duration;
+  if (*low_threshold == 0 ||
+      (demux->buffering_low_watermark_time != 0
+          && demux->buffering_low_watermark_time > *low_threshold)) {
+    *low_threshold = demux->buffering_low_watermark_time;
+  }
+
+  *high_threshold =
+      demux->buffering_high_watermark_fragments * fragment_duration;
+  if (*high_threshold == 0 || (demux->buffering_high_watermark_time != 0
+          && demux->buffering_high_watermark_time < *high_threshold)) {
+    *high_threshold = demux->buffering_high_watermark_time;
+  }
+
+  /* Make sure the low and high thresholds are less than the maximum buffering
+   * time */
+  if (*high_threshold == 0 ||
+      (demux->max_buffering_time != 0
+          && demux->max_buffering_time < *high_threshold)) {
+    *high_threshold = demux->max_buffering_time;
+  }
+
+  if (*low_threshold == 0 ||
+      (demux->max_buffering_time != 0
+          && demux->max_buffering_time < *low_threshold)) {
+    *low_threshold = demux->max_buffering_time;
+  }
+
+  /* Make sure the high threshold is higher than (or equal to) the low threshold.
+   * It's OK if they are the same, as the minimum download is 1 fragment */
+  if (*high_threshold == 0 ||
+      (*low_threshold != 0 && *low_threshold > *high_threshold)) {
+    *high_threshold = *low_threshold;
+  }
+  GST_OBJECT_UNLOCK (demux);
+}
+
+static gboolean
+gst_adaptive_demux2_stream_wait_for_output_space (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstClockTime fragment_duration)
+{
+  gboolean need_to_wait = TRUE;
+  gboolean have_any_tracks = FALSE;
+  gboolean have_active_tracks = FALSE;
+  gboolean have_filled_inactive = FALSE;
+  gboolean update_buffering = FALSE;
+
+  GstClockTime low_threshold = 0, high_threshold = 0;
+  GList *iter;
+
+  calculate_track_thresholds (demux, fragment_duration,
+      &low_threshold, &high_threshold);
+
+  /* If there are no tracks at all, don't wait. If there are no active
+   * tracks, keep filling until at least one track is full. If there
+   * are active tracks, require that they are all full */
+  TRACKS_LOCK (demux);
+  for (iter = stream->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+
+    /* Update the buffering threshold */
+    if (low_threshold != track->buffering_threshold) {
+      /* The buffering threshold for this track changed, make sure to
+       * re-check buffering status */
+      update_buffering = TRUE;
+      track->buffering_threshold = low_threshold;
+    }
+
+    have_any_tracks = TRUE;
+    if (track->active)
+      have_active_tracks = TRUE;
+
+    if (track->level_time < high_threshold) {
+      if (track->active) {
+        need_to_wait = FALSE;
+        GST_DEBUG_OBJECT (demux,
+            "stream %p track %s has level %" GST_TIME_FORMAT
+            " - needs more data (target %" GST_TIME_FORMAT
+            ") (fragment duration %" GST_TIME_FORMAT ")", stream,
+            track->stream_id, GST_TIME_ARGS (track->level_time),
+            GST_TIME_ARGS (high_threshold), GST_TIME_ARGS (fragment_duration));
+        continue;
+      }
+    } else if (!track->active) {        /* track is over threshold and inactive */
+      have_filled_inactive = TRUE;
+    }
+
+    GST_DEBUG_OBJECT (demux,
+        "stream %p track %s active (%d) has level %" GST_TIME_FORMAT,
+        stream, track->stream_id, track->active,
+        GST_TIME_ARGS (track->level_time));
+  }
+
+  /* If there are no tracks, don't wait (we might need data to create them),
+   * or if there are active tracks that need more data to hit the threshold,
+   * don't wait. Otherwise it means all active tracks are full and we should wait */
+  if (!have_any_tracks) {
+    GST_DEBUG_OBJECT (demux, "stream %p has no tracks - not waiting", stream);
+    need_to_wait = FALSE;
+  } else if (!have_active_tracks && !have_filled_inactive) {
+    GST_DEBUG_OBJECT (demux,
+        "stream %p has inactive tracks that need more data - not waiting",
+        stream);
+    need_to_wait = FALSE;
+  }
+
+  if (need_to_wait) {
+    for (iter = stream->tracks; iter; iter = iter->next) {
+      GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+      track->waiting_del_level = high_threshold;
+      GST_DEBUG_OBJECT (demux,
+          "Waiting for queued data on stream %p track %s to drop below %"
+          GST_TIME_FORMAT " (fragment duration %" GST_TIME_FORMAT ")",
+          stream, track->stream_id, GST_TIME_ARGS (track->waiting_del_level),
+          GST_TIME_ARGS (fragment_duration));
+    }
+  }
+
+  if (update_buffering) {
+    demux_update_buffering_locked (demux);
+    demux_post_buffering_locked (demux);
+  }
+
+  TRACKS_UNLOCK (demux);
+
+  return need_to_wait;
+}
+
+static GstAdaptiveDemuxTrack *
+match_parsebin_to_track (GstAdaptiveDemux2Stream * stream, GstPad * pad)
+{
+  GList *tmp;
+  GstAdaptiveDemuxTrack *found_track = NULL, *first_matched_track = NULL;
+  gint num_possible_tracks = 0;
+  GstStream *gst_stream;
+  const gchar *internal_stream_id;
+  GstStreamType stream_type;
+
+  gst_stream = gst_pad_get_stream (pad);
+
+  /* FIXME: Edward: Added assertion because I don't see in what cases we would
+   * end up with a pad from parsebin which wouldn't have an associated
+   * GstStream. */
+  g_assert (gst_stream);
+
+  internal_stream_id = gst_stream_get_stream_id (gst_stream);
+  stream_type = gst_stream_get_stream_type (gst_stream);
+
+  GST_DEBUG_OBJECT (pad,
+      "Trying to match pad from parsebin with internal streamid %s and caps %"
+      GST_PTR_FORMAT, GST_STR_NULL (internal_stream_id),
+      gst_stream_get_caps (gst_stream));
+
+  /* Try to match directly by the track's pending upstream_stream_id */
+  for (tmp = stream->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+
+    if (stream_type != GST_STREAM_TYPE_UNKNOWN && track->type != stream_type)
+      continue;
+
+    GST_DEBUG_OBJECT (pad, "track upstream_stream_id: %s",
+        track->upstream_stream_id);
+
+    if (first_matched_track == NULL)
+      first_matched_track = track;
+    num_possible_tracks++;
+
+    /* If this track has a desired upstream stream id, match on it */
+    if (track->upstream_stream_id == NULL ||
+        g_strcmp0 (track->upstream_stream_id, internal_stream_id)) {
+      /* This is not the track for this pad */
+      continue;
+    }
+
+    /* Remove pending upstream id (we have matched it for the pending
+     * stream_id) */
+    g_free (track->upstream_stream_id);
+    track->upstream_stream_id = NULL;
+    found_track = track;
+    break;
+  }
+
+  if (found_track == NULL) {
+    /* If we arrive here, it means the stream is switching pads after
+     * the stream has already started running */
+    /* No track is currently waiting for this particular stream id -
+     * try and match an existing linked track. If there's only 1 possible,
+     * take it. */
+    if (num_possible_tracks == 1 && first_matched_track != NULL) {
+      GST_LOG_OBJECT (pad, "Only one possible track to link to");
+      found_track = first_matched_track;
+    }
+  }
+
+  if (found_track == NULL) {
+    /* TODO: There are multiple possible tracks, need to match based
+     * on language code and caps. Have you found a stream like this? */
+    GST_FIXME_OBJECT (pad, "Need to match track based on caps and language");
+  }
+
+  if (found_track != NULL) {
+    if (!gst_pad_is_linked (found_track->sinkpad)) {
+      GST_LOG_OBJECT (pad, "Linking to track pad %" GST_PTR_FORMAT,
+          found_track->sinkpad);
+
+      if (gst_pad_link (pad, found_track->sinkpad) != GST_PAD_LINK_OK) {
+        GST_ERROR_OBJECT (pad, "Couldn't connect to track sinkpad");
+        /* FIXME : Do something if we can't link ? */
+      }
+    } else {
+      /* Store pad as pending link */
+      GST_LOG_OBJECT (pad,
+          "Remembering pad to be linked when current pad is unlinked");
+      g_assert (found_track->pending_srcpad == NULL);
+      found_track->pending_srcpad = gst_object_ref (pad);
+    }
+  }
+
+  if (gst_stream)
+    gst_object_unref (gst_stream);
+
+  return found_track;
+}
+
+static void
+parsebin_pad_removed_cb (GstElement * parsebin, GstPad * pad,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GList *iter;
+  GST_DEBUG_OBJECT (stream, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+  /* If this pad was pending to be linked to a track, clear it */
+  TRACKS_LOCK (stream->demux);
+  for (iter = stream->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = iter->data;
+    if (track->pending_srcpad == pad) {
+      gst_object_unref (track->pending_srcpad);
+      track->pending_srcpad = NULL;
+      break;
+    }
+  }
+  TRACKS_UNLOCK (stream->demux);
+}
+
+static void
+parsebin_pad_added_cb (GstElement * parsebin, GstPad * pad,
+    GstAdaptiveDemux2Stream * stream)
+{
+  if (!GST_PAD_IS_SRC (pad))
+    return;
+
+  GST_DEBUG_OBJECT (stream, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+  if (!match_parsebin_to_track (stream, pad))
+    GST_WARNING_OBJECT (pad, "Found no track to handle pad");
+
+  GST_DEBUG_OBJECT (stream->demux, "Done linking");
+}
+
+static void
+parsebin_deep_element_added_cb (GstBin * parsebin, GstBin * unused,
+    GstElement * element, GstAdaptiveDemux * demux)
+{
+  if (G_OBJECT_TYPE (element) == tsdemux_type) {
+    GST_DEBUG_OBJECT (demux, "Overriding tsdemux ignore-pcr to TRUE");
+    g_object_set (element, "ignore-pcr", TRUE, NULL);
+  }
+}
+
+/* must be called with manifest_lock taken */
+static gboolean
+gst_adaptive_demux2_stream_create_parser (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  if (stream->parsebin == NULL) {
+    GstEvent *event;
+
+    GST_DEBUG_OBJECT (demux, "Setting up new parsing source");
+
+    /* Workaround to detect if tsdemux is being used */
+    if (tsdemux_type == 0) {
+      GstElement *element = gst_element_factory_make ("tsdemux", NULL);
+      if (element) {
+        tsdemux_type = G_OBJECT_TYPE (element);
+        gst_object_unref (element);
+      }
+    }
+
+    stream->parsebin = gst_element_factory_make ("parsebin", NULL);
+    if (tsdemux_type)
+      g_signal_connect (stream->parsebin, "deep-element-added",
+          (GCallback) parsebin_deep_element_added_cb, demux);
+    gst_bin_add (GST_BIN_CAST (demux), stream->parsebin);
+    stream->parsebin_sink =
+        gst_element_get_static_pad (stream->parsebin, "sink");
+    stream->pad_added_id = g_signal_connect (stream->parsebin, "pad-added",
+        G_CALLBACK (parsebin_pad_added_cb), stream);
+    stream->pad_removed_id = g_signal_connect (stream->parsebin, "pad-removed",
+        G_CALLBACK (parsebin_pad_removed_cb), stream);
+
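+    /* Send a placeholder STREAM_START so the parsebin sink pad has a
+     * stream-id before caps/segment/data are pushed to it */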
+    event = gst_event_new_stream_start ("bogus");
+    if (demux->have_group_id)
+      gst_event_set_group_id (event, demux->group_id);
+
+    gst_pad_send_event (stream->parsebin_sink, event);
+
+    /* Not sure if these need to be outside the manifest lock: */
+    gst_element_sync_state_with_parent (stream->parsebin);
+    stream->last_status_code = 200;     /* default to OK */
+  }
+  return TRUE;
+}
+
+static void
+on_download_cancellation (DownloadRequest * request, DownloadRequestState state,
+    GstAdaptiveDemux2Stream * stream)
+{
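+  /* Nothing to do on cancellation */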
+}
+
+static void
+on_download_error (DownloadRequest * request, DownloadRequestState state,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  guint last_status_code = request->status_code;
+  gboolean live;
+
+  stream->download_active = FALSE;
+  stream->last_status_code = last_status_code;
+
+  GST_DEBUG_OBJECT (stream,
+      "Download finished with error, request state %d http status %u, dc %d",
+      request->state, last_status_code, stream->download_error_count);
+
+  live = gst_adaptive_demux_is_live (demux);
+  if (((last_status_code / 100 == 4 && live)
+          || last_status_code / 100 == 5)) {
+    /* 4xx/5xx */
+    /* if current position is before available start, switch to next */
+    if (!gst_adaptive_demux2_stream_has_next_fragment (demux, stream))
+      goto flushing;
+
+    if (live) {
+      gint64 range_start, range_stop;
+
+      if (!gst_adaptive_demux_get_live_seek_range (demux, &range_start,
+              &range_stop))
+        goto flushing;
+
+      if (demux->segment.position < range_start) {
+        GstFlowReturn ret;
+
+        GST_DEBUG_OBJECT (stream, "Retrying once with next segment");
+        gst_adaptive_demux2_stream_finish_download (stream, GST_FLOW_EOS, NULL);
+
+        GST_DEBUG_OBJECT (demux, "Calling update_fragment_info");
+
+        ret = gst_adaptive_demux2_stream_update_fragment_info (demux, stream);
+        GST_DEBUG_OBJECT (stream, "update_fragment_info ret: %s",
+            gst_flow_get_name (ret));
+
+        if (ret == GST_FLOW_OK)
+          goto again;
+
+      } else if (demux->segment.position > range_stop) {
+        /* wait a bit to be in range, we don't have any locks at that point */
+        GstClockTime wait_time =
+            gst_adaptive_demux2_stream_get_fragment_waiting_time (demux,
+            stream);
+        if (wait_time > 0) {
+          GST_DEBUG_OBJECT (stream,
+              "Download waiting for %" GST_TIME_FORMAT,
+              GST_TIME_ARGS (wait_time));
+          g_assert (stream->pending_cb_id == 0);
+          GST_LOG_OBJECT (stream, "Scheduling delayed load_a_fragment() call");
+          stream->pending_cb_id =
+              gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task,
+              wait_time,
+              (GSourceFunc) gst_adaptive_demux2_stream_load_a_fragment,
+              gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+        }
+      }
+    }
+
+  flushing:
+    if (stream->download_error_count >= MAX_DOWNLOAD_ERROR_COUNT) {
+      /* There is no way of knowing when a live stream has ended, so assume
+       * we are falling behind and cause a manifest reload */
+      GST_DEBUG_OBJECT (stream, "Converting error of live stream to EOS");
+      gst_adaptive_demux2_stream_handle_playlist_eos (stream);
+      return;
+    }
+  } else if (!gst_adaptive_demux2_stream_has_next_fragment (demux, stream)) {
+    /* If this is the last fragment, consider failures EOS and not actual
+     * errors. Due to rounding errors in the durations, the last fragment
+     * might not actually exist */
+    GST_DEBUG_OBJECT (stream, "Converting error for last fragment to EOS");
+    gst_adaptive_demux2_stream_handle_playlist_eos (stream);
+    return;
+  } else {
+    /* retry same segment */
+    if (++stream->download_error_count > MAX_DOWNLOAD_ERROR_COUNT) {
+      gst_adaptive_demux2_stream_error (stream);
+      return;
+    }
+    goto again;
+  }
+
+again:
+  /* wait a short time in case the server needs a bit to recover */
+  GST_LOG_OBJECT (stream,
+      "Scheduling delayed load_a_fragment() call to retry in 10 milliseconds");
+  g_assert (stream->pending_cb_id == 0);
+  stream->pending_cb_id = gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task, 10 * GST_MSECOND,  /* Retry in 10 ms */
+      (GSourceFunc) gst_adaptive_demux2_stream_load_a_fragment,
+      gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+}
+
+static void
+update_stream_bitrate (GstAdaptiveDemux2Stream * stream,
+    DownloadRequest * request)
+{
+  GstClockTimeDiff last_download_duration;
+  guint64 fragment_bytes_downloaded = request->content_received;
+
+  /* The stream last_download time tracks the full download time for now */
+  stream->last_download_time =
+      GST_CLOCK_DIFF (request->download_request_time,
+      request->download_end_time);
+
+  /* Here we only track the time the data took to arrive and ignore request delay, so we can estimate bitrate */
+  last_download_duration =
+      GST_CLOCK_DIFF (request->download_start_time, request->download_end_time);
+
+  /* If the entire response arrived in the first buffer
+   * though, include the request time to get a valid
+   * bitrate estimate */
+  if (last_download_duration < 2 * stream->last_download_time)
+    last_download_duration = stream->last_download_time;
+
+  if (last_download_duration > 0) {
+    stream->last_bitrate =
+        gst_util_uint64_scale (fragment_bytes_downloaded,
+        8 * GST_SECOND, last_download_duration);
+
+    GST_DEBUG_OBJECT (stream,
+        "Updated stream bitrate. %" G_GUINT64_FORMAT
+        " bytes. download time %" GST_TIME_FORMAT " bitrate %"
+        G_GUINT64_FORMAT " bps", fragment_bytes_downloaded,
+        GST_TIME_ARGS (last_download_duration), stream->last_bitrate);
+  }
+}
+
+static void
+on_download_progress (DownloadRequest * request, DownloadRequestState state,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstBuffer *buffer = download_request_take_buffer (request);
+
+  if (buffer) {
+    GstFlowReturn ret;
+
+    GST_DEBUG_OBJECT (stream,
+        "Handling buffer of %" G_GSIZE_FORMAT
+        " bytes of ongoing download progress - %" G_GUINT64_FORMAT " / %"
+        G_GUINT64_FORMAT " bytes", gst_buffer_get_size (buffer),
+        request->content_received, request->content_length);
+
+    /* Drop the request lock when parsing data. FIXME: Check and comment why this is needed */
+    download_request_unlock (request);
+    ret = gst_adaptive_demux2_stream_parse_buffer (stream, buffer);
+    download_request_lock (request);
+
+    if (stream->state != GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING)
+      return;
+
+    if (ret != GST_FLOW_OK) {
+      GST_DEBUG_OBJECT (stream,
+          "Buffer parsing returned: %d %s. Aborting download", ret,
+          gst_flow_get_name (ret));
+
+      if (!stream->downloading_header && !stream->downloading_index)
+        update_stream_bitrate (stream, request);
+
+      downloadhelper_cancel_request (demux->download_helper, request);
+
+      /* cancellation is async, so recycle our download request to avoid races */
+      download_request_unref (stream->download_request);
+      stream->download_request = download_request_new ();
+
+      gst_adaptive_demux2_stream_finish_download (stream, ret, NULL);
+    }
+  }
+}
+
+static void
+on_download_complete (DownloadRequest * request, DownloadRequestState state,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstBuffer *buffer;
+
+  stream->download_active = FALSE;
+
+  if (G_UNLIKELY (stream->cancelled))
+    return;
+
+  GST_DEBUG_OBJECT (stream,
+      "Stream %p %s download for %s is complete with state %d",
+      stream, uritype (stream), request->uri, request->state);
+
+  /* Update bitrate for fragment downloads */
+  if (!stream->downloading_header && !stream->downloading_index)
+    update_stream_bitrate (stream, request);
+
+  buffer = download_request_take_buffer (request);
+  if (buffer)
+    ret = gst_adaptive_demux2_stream_parse_buffer (stream, buffer);
+
+  GST_DEBUG_OBJECT (stream,
+      "%s download finished: %s ret %d %s. Stream state %d", uritype (stream),
+      request->uri, ret, gst_flow_get_name (ret), stream->state);
+
+  if (stream->state != GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING)
+    return;
+
+  g_assert (stream->pending_cb_id == 0);
+  gst_adaptive_demux2_stream_finish_download (stream, ret, NULL);
+}
+
+/* must be called from the scheduler context
+ *
+ * Will submit the request only, which will complete asynchronously
+ */
+static GstFlowReturn
+gst_adaptive_demux2_stream_begin_download_uri (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, const gchar * uri, gint64 start,
+    gint64 end)
+{
+  DownloadRequest *request = stream->download_request;
+
+  GST_DEBUG_OBJECT (demux,
+      "Downloading %s uri: %s, range:%" G_GINT64_FORMAT " - %" G_GINT64_FORMAT,
+      uritype (stream), uri, start, end);
+
+  if (!gst_adaptive_demux2_stream_create_parser (stream))
+    return GST_FLOW_ERROR;
+
+  /* Configure our download request */
+  download_request_set_uri (request, uri, start, end);
+
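+  /* Header and index downloads are only parsed once they complete, so no
+   * progress callback is installed for them. Fragment data is parsed
+   * incrementally via on_download_progress() */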
+  if (stream->downloading_header || stream->downloading_index) {
+    download_request_set_callbacks (request,
+        (DownloadRequestEventCallback) on_download_complete,
+        (DownloadRequestEventCallback) on_download_error,
+        (DownloadRequestEventCallback) on_download_cancellation,
+        (DownloadRequestEventCallback) NULL, stream);
+  } else {
+    download_request_set_callbacks (request,
+        (DownloadRequestEventCallback) on_download_complete,
+        (DownloadRequestEventCallback) on_download_error,
+        (DownloadRequestEventCallback) on_download_cancellation,
+        (DownloadRequestEventCallback) on_download_progress, stream);
+  }
+
+  if (!downloadhelper_submit_request (demux->download_helper,
+          demux->manifest_uri, DOWNLOAD_FLAG_NONE, request, NULL))
+    return GST_FLOW_ERROR;
+
+  stream->download_active = TRUE;
+
+  return GST_FLOW_OK;
+}
+
+/* must be called from the scheduler context */
+static GstFlowReturn
+gst_adaptive_demux2_stream_download_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  gchar *url = NULL;
+
+  /* FIXME :  */
+  /* THERE ARE THREE DIFFERENT VARIABLES FOR THE "BEGINNING" OF A FRAGMENT ! */
+  if (stream->starting_fragment) {
+    GST_DEBUG_OBJECT (stream, "Downloading %s%s%s",
+        stream->fragment.uri ? "FRAGMENT " : "",
+        stream->need_header && stream->fragment.header_uri ? "HEADER " : "",
+        stream->need_index && stream->fragment.index_uri ? "INDEX" : "");
+
+    if (stream->fragment.uri == NULL && stream->fragment.header_uri == NULL &&
+        stream->fragment.index_uri == NULL)
+      goto no_url_error;
+
+    stream->first_fragment_buffer = TRUE;
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING;
+  }
+
+  if (stream->need_header && stream->fragment.header_uri != NULL) {
+
+    /* Set the need_index flag when we start the header if we'll also need the index */
+    stream->need_index = (stream->fragment.index_uri != NULL);
+
+    GST_DEBUG_OBJECT (stream, "Fetching header %s %" G_GINT64_FORMAT "-%"
+        G_GINT64_FORMAT, stream->fragment.header_uri,
+        stream->fragment.header_range_start, stream->fragment.header_range_end);
+
+    stream->downloading_header = TRUE;
+
+    return gst_adaptive_demux2_stream_begin_download_uri (demux, stream,
+        stream->fragment.header_uri, stream->fragment.header_range_start,
+        stream->fragment.header_range_end);
+  }
+
+  /* check if we have an index */
+  if (stream->need_index && stream->fragment.index_uri != NULL) {
+    GST_DEBUG_OBJECT (stream,
+        "Fetching index %s %" G_GINT64_FORMAT "-%" G_GINT64_FORMAT,
+        stream->fragment.index_uri,
+        stream->fragment.index_range_start, stream->fragment.index_range_end);
+
+    stream->downloading_index = TRUE;
+
+    return gst_adaptive_demux2_stream_begin_download_uri (demux, stream,
+        stream->fragment.index_uri, stream->fragment.index_range_start,
+        stream->fragment.index_range_end);
+  }
+
+  url = stream->fragment.uri;
+  GST_DEBUG_OBJECT (stream, "Got url '%s' for stream %p", url, stream);
+  if (!url)
+    return GST_FLOW_OK;
+
+  /* Download the actual fragment, either in chunks or in one go */
+  stream->first_fragment_buffer = TRUE;
+
+  if (klass->need_another_chunk && klass->need_another_chunk (stream)
+      && stream->fragment.chunk_size != 0) {
+    /* Handle chunk downloading */
+    gint64 range_start = stream->fragment.range_start;
+    gint64 range_end = stream->fragment.range_end;
+    gint chunk_size = stream->fragment.chunk_size;
+    gint64 chunk_end;
+
+    /* HTTP ranges are inclusive for the end */
+    if (chunk_size != -1) {
+      chunk_end = range_start + chunk_size - 1;
+      if (range_end != -1 && range_end < chunk_end)
+        chunk_end = range_end;
+    } else {
+      chunk_end = range_end;
+    }
+
+    GST_DEBUG_OBJECT (stream,
+        "Starting chunked download %s %" G_GINT64_FORMAT "-%" G_GINT64_FORMAT,
+        url, range_start, chunk_end);
+    return gst_adaptive_demux2_stream_begin_download_uri (demux, stream, url,
+        range_start, chunk_end);
+  }
+
+  /* regular single chunk download */
+  stream->fragment.chunk_size = 0;
+
+  return gst_adaptive_demux2_stream_begin_download_uri (demux, stream, url,
+      stream->fragment.range_start, stream->fragment.range_end);
+
+no_url_error:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+        (_("Failed to get fragment URL.")),
+        ("An error happened when getting fragment URL"));
+    return GST_FLOW_ERROR;
+  }
+}
+
+static gboolean
+gst_adaptive_demux2_stream_push_event (GstAdaptiveDemux2Stream * stream,
+    GstEvent * event)
+{
+  gboolean ret = TRUE;
+  GstPad *pad;
+
+  /* If there's a parsebin, push the event through it */
+  if (stream->parsebin_sink != NULL) {
+    pad = gst_object_ref (stream->parsebin_sink);
+    GST_DEBUG_OBJECT (pad, "Pushing event %" GST_PTR_FORMAT, event);
+    ret = gst_pad_send_event (pad, gst_event_ref (event));
+    gst_object_unref (pad);
+  }
+
+  /* If the event is EOS, ensure that all tracks are EOS. This catches
+   * the case where the parsebin hasn't parsed anything yet (we switched
+   * to a never before used track right near EOS, or it didn't parse enough
+   * to create pads and be able to send EOS through to the tracks).
+   *
+   * We don't need to care about any other events
+   */
+  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+    GstAdaptiveDemux *demux = stream->demux;
+    GList *iter;
+
+    TRACKS_LOCK (demux);
+    for (iter = stream->tracks; iter; iter = iter->next) {
+      GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+      ret &= gst_pad_send_event (track->sinkpad, gst_event_ref (event));
+    }
+    TRACKS_UNLOCK (demux);
+  }
+
+  gst_event_unref (event);
+  return ret;
+}
+
+static void
+gst_adaptive_demux2_stream_error (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstMessage *msg;
+  GstStructure *details;
+
+  details = gst_structure_new_empty ("details");
+  gst_structure_set (details, "http-status-code", G_TYPE_UINT,
+      stream->last_status_code, NULL);
+
+  stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_ERRORED;
+
+  if (stream->last_error) {
+    gchar *debug = g_strdup_printf ("Error on stream %s",
+        GST_OBJECT_NAME (stream));
+    msg =
+        gst_message_new_error_with_details (GST_OBJECT_CAST (demux),
+        stream->last_error, debug, details);
+    GST_ERROR_OBJECT (stream, "Download error: %s",
+        stream->last_error->message);
+    g_free (debug);
+  } else {
+    GError *err = g_error_new (GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND,
+        _("Couldn't download fragments"));
+    msg =
+        gst_message_new_error_with_details (GST_OBJECT_CAST (demux), err,
+        "Fragment downloading has failed consecutive times", details);
+    g_error_free (err);
+    GST_ERROR_OBJECT (stream,
+        "Download error: Couldn't download fragments, too many failures");
+  }
+
+  gst_element_post_message (GST_ELEMENT_CAST (demux), msg);
+}
+
+/* Called when a stream reaches the end of a playback segment */
+static void
+gst_adaptive_demux2_stream_end_of_manifest (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstFlowReturn combined =
+      gst_adaptive_demux_period_combine_stream_flows (demux->input_period);
+
+  GST_DEBUG_OBJECT (stream, "Combined flow %s", gst_flow_get_name (combined));
+
+  if (demux->priv->outputs) {
+    GstEvent *eos = gst_event_new_eos ();
+
+    GST_DEBUG_OBJECT (stream, "Stream is EOS. Stopping.");
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS;
+
+    gst_event_set_seqnum (eos, stream->demux->priv->segment_seqnum);
+    gst_adaptive_demux2_stream_push_event (stream, eos);
+  } else {
+    GST_ERROR_OBJECT (demux, "Can't push EOS on non-exposed pad");
+    gst_adaptive_demux2_stream_error (stream);
+  }
+
+  if (combined == GST_FLOW_EOS && gst_adaptive_demux_has_next_period (demux)) {
+    GST_DEBUG_OBJECT (stream, "Next period available, advancing");
+    gst_adaptive_demux_advance_period (demux);
+  }
+}
+
+static gboolean
+gst_adaptive_demux2_stream_reload_manifest_cb (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  gboolean is_live = gst_adaptive_demux_is_live (demux);
+
+  stream->pending_cb_id = 0;
+
+  /* Refetch the playlist now after we waited */
+  /* FIXME: Make this manifest update async and handle it on completion */
+  if (!is_live && gst_adaptive_demux_update_manifest (demux) == GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (demux, "Updated the playlist");
+  }
+
+  /* We were called here from a timeout, so if the load function wants to loop
+   * again, schedule an immediate callback but return G_SOURCE_REMOVE either
+   * way */
+  while (gst_adaptive_demux2_stream_next_download (stream));
+
+  return G_SOURCE_REMOVE;
+}
+
+static gboolean
+gst_adaptive_demux2_stream_on_output_space_available_cb (GstAdaptiveDemux2Stream
+    * stream)
+{
+  /* Nothing to do if the state already moved on, the stream was stopped, or
+   * another track already woke up and needed data */
+  if (stream->state != GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_OUTPUT_SPACE)
+    return G_SOURCE_REMOVE;
+
+  while (gst_adaptive_demux2_stream_load_a_fragment (stream));
+
+  return G_SOURCE_REMOVE;
+}
+
+void
+gst_adaptive_demux2_stream_on_output_space_available (GstAdaptiveDemux2Stream *
+    stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GList *iter;
+
+  for (iter = stream->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *tmp_track = (GstAdaptiveDemuxTrack *) iter->data;
+    tmp_track->waiting_del_level = 0;
+  }
+
+  GST_LOG_OBJECT (stream, "Scheduling output_space_available() call");
+
+  gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+      (GSourceFunc) gst_adaptive_demux2_stream_on_output_space_available_cb,
+      gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+}
+
+void
+gst_adaptive_demux2_stream_on_manifest_update (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  if (stream->state != GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_MANIFEST_UPDATE)
+    return;
+
+  g_assert (stream->pending_cb_id == 0);
+
+  GST_LOG_OBJECT (stream, "Scheduling load_a_fragment() call");
+  stream->pending_cb_id =
+      gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+      (GSourceFunc) gst_adaptive_demux2_stream_load_a_fragment,
+      gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+}
+
+static void
+gst_adaptive_demux2_stream_handle_playlist_eos (GstAdaptiveDemux2Stream *
+    stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  if (gst_adaptive_demux_is_live (demux) && (demux->segment.rate == 1.0
+          || gst_adaptive_demux2_stream_in_live_seek_range (demux, stream))) {
+
+    if (!gst_adaptive_demux_has_next_period (demux)) {
+      /* Only wait if we cannot advance to another period. Getting EOS while
+       * a next period exists means the current period has ended, but playback
+       * can continue in the next one */
+      GST_DEBUG_OBJECT (stream,
+          "Live playlist EOS - waiting for manifest update");
+      stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_MANIFEST_UPDATE;
+      gst_adaptive_demux2_stream_wants_manifest_update (demux);
+      return;
+    }
+
+    if (stream->replaced)
+      return;
+  }
+
+  gst_adaptive_demux2_stream_end_of_manifest (stream);
+}
+
+static gboolean
+gst_adaptive_demux2_stream_load_a_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  gboolean live = gst_adaptive_demux_is_live (demux);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  stream->pending_cb_id = 0;
+
+  GST_LOG_OBJECT (stream, "entering, state = %d.", stream->state);
+
+  switch (stream->state) {
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART:
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT:
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_LIVE:
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_OUTPUT_SPACE:
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_MANIFEST_UPDATE:
+      /* Get information about the fragment to download */
+      GST_DEBUG_OBJECT (demux, "Calling update_fragment_info");
+      ret = gst_adaptive_demux2_stream_update_fragment_info (demux, stream);
+      GST_DEBUG_OBJECT (stream,
+          "Fragment info update result: %d %s", ret, gst_flow_get_name (ret));
+
+      if (ret == GST_FLOW_OK)
+        stream->starting_fragment = TRUE;
+      break;
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING:
+      break;
+    case GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS:
+      GST_ERROR_OBJECT (stream,
+          "Unexpected stream state EOS. The stream should not be running now.");
+      return FALSE;
+    default:
+      GST_ERROR_OBJECT (stream, "Unexpected stream state %d", stream->state);
+      g_assert_not_reached ();
+      break;
+  }
+
+  if (ret == GST_FLOW_OK) {
+    /* Wait for room in the output tracks */
+    if (gst_adaptive_demux2_stream_wait_for_output_space (demux, stream,
+            stream->fragment.duration)) {
+      stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_OUTPUT_SPACE;
+      return FALSE;
+    }
+  }
+
+  if (ret == GST_FLOW_OK) {
+    /* wait for live fragments to be available */
+    if (live) {
+      GstClockTime wait_time =
+          gst_adaptive_demux2_stream_get_fragment_waiting_time (demux, stream);
+      if (wait_time > 0) {
+        GST_DEBUG_OBJECT (stream,
+            "Download waiting for %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (wait_time));
+
+        stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_LIVE;
+
+        GST_LOG_OBJECT (stream, "Scheduling delayed load_a_fragment() call");
+        g_assert (stream->pending_cb_id == 0);
+        stream->pending_cb_id =
+            gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task,
+            wait_time, (GSourceFunc) gst_adaptive_demux2_stream_load_a_fragment,
+            gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+        return FALSE;
+      }
+    }
+
+    if (gst_adaptive_demux2_stream_download_fragment (stream) != GST_FLOW_OK) {
+      GST_ERROR_OBJECT (demux,
+          "Failed to begin fragment download for stream %p", stream);
+      return FALSE;
+    }
+  }
+
+  switch (ret) {
+    case GST_FLOW_OK:
+      break;                    /* all is good, let's go */
+    case GST_FLOW_EOS:
+      GST_DEBUG_OBJECT (stream, "EOS, checking to stop download loop");
+      gst_adaptive_demux2_stream_handle_playlist_eos (stream);
+      return FALSE;
+    case GST_FLOW_NOT_LINKED:
+    {
+      stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS;
+
+      if (gst_adaptive_demux_period_combine_stream_flows (demux->input_period)
+          == GST_FLOW_NOT_LINKED) {
+        GST_ELEMENT_FLOW_ERROR (demux, ret);
+      }
+    }
+      break;
+
+    case GST_FLOW_FLUSHING:
+      /* Flushing is normal, the target track might have been unselected */
+      if (G_UNLIKELY (stream->cancelled))
+        return FALSE;
+
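+      /* not cancelled: execution falls through to the default handling below */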
+    default:
+      if (ret <= GST_FLOW_ERROR) {
+        GST_WARNING_OBJECT (demux, "Error while downloading fragment");
+        if (++stream->download_error_count > MAX_DOWNLOAD_ERROR_COUNT) {
+          gst_adaptive_demux2_stream_error (stream);
+          return FALSE;
+        }
+
+        g_clear_error (&stream->last_error);
+
+        /* First try to update the playlist for non-live playlists
+         * in case the URIs have changed in the meantime. But only
+         * try it the first time; after that we're going to wait a
+         * bit so we don't flood the server */
+        if (stream->download_error_count == 1
+            && !gst_adaptive_demux_is_live (demux)) {
+          /* TODO hlsdemux had more options to this function (boolean and err) */
+          if (gst_adaptive_demux_update_manifest (demux) == GST_FLOW_OK) {
+            /* Retry immediately, the playlist actually has changed */
+            GST_DEBUG_OBJECT (demux, "Updated the playlist");
+            return TRUE;
+          }
+        }
+
+        /* Wait half the fragment duration before retrying */
+        GST_LOG_OBJECT (stream, "Scheduling delayed reload_manifest_cb() call");
+        g_assert (stream->pending_cb_id == 0);
+        stream->pending_cb_id =
+            gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task,
+            stream->fragment.duration / 2,
+            (GSourceFunc) gst_adaptive_demux2_stream_reload_manifest_cb,
+            gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+        return FALSE;
+      }
+      break;
+  }
+
+  return FALSE;
+}
+
+static gboolean
+gst_adaptive_demux2_stream_next_download (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  gboolean end_of_manifest = FALSE;
+
+  GST_LOG_OBJECT (stream, "Looking for next download");
+
+  /* Restarting download, figure out new position
+   * FIXME : Move this to a separate function ? */
+  if (G_UNLIKELY (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART)) {
+    GstClockTimeDiff stream_time = 0;
+
+    GST_DEBUG_OBJECT (stream, "Activating stream after restart");
+
+    if (stream->parsebin_sink != NULL) {
+      /* If the parsebin already exists, we need to clear it out (if it doesn't,
+       * this is the first time we've used this stream, so it's all good) */
+      gst_adaptive_demux2_stream_push_event (stream,
+          gst_event_new_flush_start ());
+      gst_adaptive_demux2_stream_push_event (stream,
+          gst_event_new_flush_stop (FALSE));
+    }
+
+    GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+    stream_time = stream->start_position;
+
+    GST_DEBUG_OBJECT (stream, "Restarting stream at "
+        "stream position %" GST_STIME_FORMAT, GST_STIME_ARGS (stream_time));
+
+    if (GST_CLOCK_STIME_IS_VALID (stream_time)) {
+      /* TODO check return */
+      gst_adaptive_demux2_stream_seek (demux, stream, demux->segment.rate >= 0,
+          0, stream_time, &stream_time);
+      stream->current_position = stream->start_position;
+
+      GST_DEBUG_OBJECT (stream,
+          "stream_time after restart seek: %" GST_STIME_FORMAT
+          " position %" GST_STIME_FORMAT, GST_STIME_ARGS (stream_time),
+          GST_STIME_ARGS (stream->current_position));
+    }
+
+    /* Trigger (re)computation of the parsebin input segment */
+    stream->compute_segment = TRUE;
+
+    GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+    stream->discont = TRUE;
+    stream->need_header = TRUE;
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT;
+  }
+
+  /* Check if we're done with our segment */
+  GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+  if (demux->segment.rate > 0) {
+    if (GST_CLOCK_TIME_IS_VALID (demux->segment.stop)
+        && stream->current_position >= demux->segment.stop) {
+      end_of_manifest = TRUE;
+    }
+  } else {
+    if (GST_CLOCK_TIME_IS_VALID (demux->segment.start)
+        && stream->current_position <= demux->segment.start) {
+      end_of_manifest = TRUE;
+    }
+  }
+  GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+  if (end_of_manifest) {
+    gst_adaptive_demux2_stream_end_of_manifest (stream);
+    return FALSE;
+  }
+  return gst_adaptive_demux2_stream_load_a_fragment (stream);
+}
+
+static gboolean
+gst_adaptive_demux2_stream_can_start (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (!klass->stream_can_start)
+    return TRUE;
+  return klass->stream_can_start (demux, stream);
+}
+
+/**
+ * gst_adaptive_demux2_stream_start:
+ * @stream: a #GstAdaptiveDemux2Stream
+ *
+ * Start the given @stream. Should be called by subclasses that previously
+ * returned %FALSE in `GstAdaptiveDemux::stream_can_start()`.
+ */
+void
+gst_adaptive_demux2_stream_start (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux;
+
+  g_return_if_fail (stream && stream->demux);
+
+  demux = stream->demux;
+
+  if (stream->pending_cb_id != 0 || stream->download_active) {
+    /* There is already something active / pending on this stream */
+    GST_LOG_OBJECT (stream, "Stream already running");
+    return;
+  }
+
+  /* Some streams require a delayed start, i.e. they need more information
+   * before they can actually be started */
+  if (!gst_adaptive_demux2_stream_can_start (stream)) {
+    GST_LOG_OBJECT (stream, "Stream will be started asynchronously");
+    return;
+  }
+
+  if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS) {
+    GST_LOG_OBJECT (stream, "Stream is EOS already");
+    return;
+  }
+
+  if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED ||
+      stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART) {
+    GST_LOG_OBJECT (stream, "Activating stream. Current state %d",
+        stream->state);
+    stream->cancelled = FALSE;
+    stream->replaced = FALSE;
+    stream->last_ret = GST_FLOW_OK;
+
+    if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED)
+      stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT;
+  }
+
+  GST_LOG_OBJECT (stream, "Scheduling next_download() call");
+  stream->pending_cb_id =
+      gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+      (GSourceFunc) gst_adaptive_demux2_stream_next_download,
+      gst_object_ref (stream), (GDestroyNotify) gst_object_unref);
+}
+
+void
+gst_adaptive_demux2_stream_stop (GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemux *demux = stream->demux;
+
+  GST_DEBUG_OBJECT (stream, "Stopping stream (from state %d)", stream->state);
+  stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED;
+
+  if (stream->pending_cb_id != 0) {
+    gst_adaptive_demux_loop_cancel_call (demux->priv->scheduler_task,
+        stream->pending_cb_id);
+    stream->pending_cb_id = 0;
+  }
+
+  /* Cancel and drop the existing download request */
+  downloadhelper_cancel_request (demux->download_helper,
+      stream->download_request);
+  download_request_unref (stream->download_request);
+  stream->downloading_header = stream->downloading_index = FALSE;
+  stream->download_request = download_request_new ();
+  stream->download_active = FALSE;
+}
+
+gboolean
+gst_adaptive_demux2_stream_is_running (GstAdaptiveDemux2Stream * stream)
+{
+  if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED)
+    return FALSE;
+  if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART)
+    return FALSE;
+  if (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS)
+    return FALSE;
+  return TRUE;
+}
+
+gboolean
+gst_adaptive_demux2_stream_is_selected_locked (GstAdaptiveDemux2Stream * stream)
+{
+  GList *tmp;
+
+  for (tmp = stream->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = tmp->data;
+    if (track->selected)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/**
+ * gst_adaptive_demux2_stream_is_selected:
+ * @stream: A #GstAdaptiveDemux2Stream
+ *
+ * Returns: %TRUE if any of the tracks targeted by @stream is selected
+ */
+gboolean
+gst_adaptive_demux2_stream_is_selected (GstAdaptiveDemux2Stream * stream)
+{
+  gboolean ret;
+
+  g_return_val_if_fail (stream && stream->demux, FALSE);
+
+  TRACKS_LOCK (stream->demux);
+  ret = gst_adaptive_demux2_stream_is_selected_locked (stream);
+  TRACKS_UNLOCK (stream->demux);
+
+  return ret;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-track.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux-track.c
new file mode 100644 (file)
index 0000000..0872af3
--- /dev/null
@@ -0,0 +1,891 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstadaptivedemux.h"
+#include "gstadaptivedemux-private.h"
+
+GST_DEBUG_CATEGORY_EXTERN (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+/* TRACKS_LOCK held
+ * Flushes all data in the track and resets it */
+void
+gst_adaptive_demux_track_flush (GstAdaptiveDemuxTrack * track)
+{
+  GST_DEBUG_OBJECT (track->demux, "Flushing track '%s' with %u queued items",
+      track->stream_id, gst_queue_array_get_length (track->queue));
+  gst_queue_array_clear (track->queue);
+
+  gst_event_store_flush (&track->sticky_events);
+
+  gst_segment_init (&track->input_segment, GST_FORMAT_TIME);
+  track->input_time = 0;
+  track->input_segment_seqnum = GST_SEQNUM_INVALID;
+
+  gst_segment_init (&track->output_segment, GST_FORMAT_TIME);
+  track->gap_position = track->gap_duration = GST_CLOCK_TIME_NONE;
+
+  track->output_time = 0;
+  track->next_position = GST_CLOCK_STIME_NONE;
+
+  track->level_bytes = 0;
+  track->level_time = 0;
+
+  track->eos = FALSE;
+
+  track->update_next_segment = FALSE;
+
+  track->output_discont = FALSE;
+}
+
+static gboolean
+_track_sink_query_function (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstAdaptiveDemuxTrack *track = gst_pad_get_element_private (pad);
+  GstAdaptiveDemux *demux = track->demux;
+  gboolean ret = FALSE;
+
+  GST_DEBUG_OBJECT (pad, "query %" GST_PTR_FORMAT, query);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_ACCEPT_CAPS:
+      /* Should we intersect by track caps as a safety check ? */
+      GST_DEBUG_OBJECT (demux, "We accept any caps on %s:%s",
+          GST_DEBUG_PAD_NAME (pad));
+      gst_query_set_accept_caps_result (query, TRUE);
+      ret = TRUE;
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Dequeue an item from the track queue for processing
+ * TRACKS_LOCK held */
+static gboolean
+track_dequeue_item_locked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxTrack * track, TrackQueueItem * out_item)
+{
+  TrackQueueItem *item = gst_queue_array_peek_head_struct (track->queue);
+
+  if (item == NULL)
+    return FALSE;
+
+  *out_item = *item;
+  gst_queue_array_pop_head (track->queue);
+
+  GST_LOG_OBJECT (demux,
+      "track %s item running_time %" GST_STIME_FORMAT " end %" GST_STIME_FORMAT,
+      track->stream_id, GST_STIME_ARGS (out_item->runningtime),
+      GST_STIME_ARGS (out_item->runningtime_end));
+
+  return TRUE;
+}
+
+static inline GstClockTimeDiff my_segment_to_running_time (GstSegment * segment,
+    GstClockTime val);
+
+/* Dequeue or generate a buffer/event from the track queue and update the buffering levels
+ * TRACKS_LOCK held */
+GstMiniObject *
+track_dequeue_data_locked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxTrack * track, gboolean check_sticky_events)
+{
+  GstMiniObject *res = NULL;
+  gboolean is_pending_sticky = FALSE;
+  GstEvent *event;
+  GstClockTimeDiff running_time;
+  GstClockTimeDiff running_time_end;
+  gsize item_size = 0;
+
+  if (check_sticky_events) {
+    /* If there are any sticky events to send, do that before anything else */
+    event = gst_event_store_get_next_pending (&track->sticky_events);
+    if (event != NULL) {
+      res = (GstMiniObject *) event;
+      running_time = running_time_end = GST_CLOCK_STIME_NONE;
+      GST_DEBUG_OBJECT (demux,
+          "track %s dequeued pending sticky event %" GST_PTR_FORMAT,
+          track->stream_id, event);
+      is_pending_sticky = TRUE;
+      goto handle_event;
+    }
+  }
+
+  do {
+    TrackQueueItem item;
+
+    /* If we're filling a gap, generate a gap event */
+    if (track->gap_position != GST_CLOCK_TIME_NONE) {
+      GstClockTime pos = track->gap_position;
+      GstClockTime duration = track->gap_duration;
+
+      if (duration > 100 * GST_MSECOND) {
+        duration = 100 * GST_MSECOND;
+        track->gap_position += duration;
+        track->gap_duration -= duration;
+      } else {
+        /* Duration dropped below 100 ms, this is the last
+         * gap of the sequence */
+        track->gap_position = GST_CLOCK_TIME_NONE;
+        track->gap_duration = GST_CLOCK_TIME_NONE;
+      }
+
+      res = (GstMiniObject *) gst_event_new_gap (pos, duration);
+      running_time = my_segment_to_running_time (&track->output_segment, pos);
+      running_time_end =
+          my_segment_to_running_time (&track->output_segment, pos + duration);
+      item_size = 0;
+      break;
+    }
+
+    /* Otherwise, try and pop something from the item queue */
+    if (!track_dequeue_item_locked (demux, track, &item))
+      return NULL;
+
+    res = item.item;
+    running_time = item.runningtime;
+    running_time_end = item.runningtime_end;
+    item_size = item.size;
+
+    /* Special case for gap events, to drain them out little-by-little.
+     * See if it can be output directly, otherwise set up to fill a gap and loop again */
+    if (GST_IS_EVENT (res) && GST_EVENT_TYPE (res) == GST_EVENT_GAP
+        && GST_CLOCK_STIME_IS_VALID (running_time)) {
+      GstClockTime pos, duration;
+      GstClockTime cstart, cstop;
+
+      gst_event_parse_gap (GST_EVENT_CAST (res), &pos, &duration);
+
+      /* Handle a track with no duration as 0 duration. This can only
+       * happen if an element in parsebin emits such a gap event */
+      if (duration == GST_CLOCK_TIME_NONE)
+        duration = 0;
+
+      /* We *can* end up with a gap outside of the segment range due to the
+       * segment base updating when (re)activating a track. In that case, just
+       * let the gap event flow out normally.
+       * Otherwise, the gap crosses into the segment: clip it to the segment
+       * bounds and set up to fill the gap */
+      if (!gst_segment_clip (&track->output_segment, GST_FORMAT_TIME, pos,
+              pos + duration, &cstart, &cstop))
+        break;
+
+      pos = cstart;
+      duration = cstop - cstart;
+
+      GST_DEBUG_OBJECT (demux,
+          "track %s Starting gap for runningtime %" GST_STIME_FORMAT
+          " - clipped position %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
+          track->stream_id, GST_STIME_ARGS (running_time), GST_TIME_ARGS (pos),
+          GST_TIME_ARGS (duration));
+
+      track->gap_position = pos;
+      track->gap_duration = duration;
+
+      gst_mini_object_unref (res);
+      res = NULL;
+      continue;
+    }
+  } while (res == NULL);
+
+handle_event:
+  if (GST_IS_EVENT (res)) {
+    event = (GstEvent *) res;
+
+    switch (GST_EVENT_TYPE (event)) {
+      case GST_EVENT_SEGMENT:
+        gst_event_copy_segment (event, &track->output_segment);
+        if (track->update_next_segment) {
+          GstClockTimeDiff global_output_position =
+              demux->priv->global_output_position;
+
+          GST_DEBUG ("track %s: Override segment for running time %"
+              GST_STIME_FORMAT " : %" GST_PTR_FORMAT, track->stream_id,
+              GST_STIME_ARGS (global_output_position), event);
+          gst_event_unref (event);
+          gst_segment_set_running_time (&track->output_segment, GST_FORMAT_TIME,
+              global_output_position);
+
+          event = gst_event_new_segment (&track->output_segment);
+          gst_event_set_seqnum (event, track->demux->priv->segment_seqnum);
+
+          res = (GstMiniObject *) event;
+          running_time = global_output_position;
+
+          track->update_next_segment = FALSE;
+
+          /* Replace the stored sticky event with this one */
+          is_pending_sticky = FALSE;
+        }
+        break;
+      default:
+        break;
+    }
+
+    /* Store any sticky event in the cache, unless this is already an event
+     * from the pending sticky_events store */
+    if (!is_pending_sticky && GST_EVENT_IS_STICKY (event)) {
+      GST_DEBUG_OBJECT (demux,
+          "track %s Storing sticky event %" GST_PTR_FORMAT,
+          track->stream_id, event);
+      gst_event_store_insert_event (&track->sticky_events, event, FALSE);
+    }
+  }
+
+  /* Update track buffering levels */
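+  /* e.g. input_time=5s and output_time=2s yield level_time=3s of queued data */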
+  if (running_time != GST_CLOCK_STIME_NONE) {
+    GstClockTimeDiff output_time;
+
+    track->output_time = running_time;
+    if (running_time_end != GST_CLOCK_STIME_NONE)
+      track->output_time = running_time_end;
+
+    output_time = MAX (track->output_time, demux->priv->global_output_position);
+    if (track->input_time >= output_time)
+      track->level_time = track->input_time - output_time;
+    else
+      track->level_time = 0;
+
+    GST_LOG_OBJECT (demux,
+        "track %s input_time:%" GST_STIME_FORMAT " output_time:%"
+        GST_STIME_FORMAT " level:%" GST_TIME_FORMAT,
+        track->stream_id, GST_STIME_ARGS (track->input_time),
+        GST_STIME_ARGS (output_time), GST_TIME_ARGS (track->level_time));
+  } else {
+    GST_LOG_OBJECT (demux, "track %s popping untimed item %" GST_PTR_FORMAT,
+        track->stream_id, res);
+  }
+
+  track->level_bytes -= item_size;
+
+  /* FIXME: This logic should be in adaptive demux, not the track */
+  if (track->level_time < track->waiting_del_level) {
+    /* Wake up download loop */
+    GstAdaptiveDemux2Stream *stream =
+        find_stream_for_track_locked (demux, track);
+
+    g_assert (stream != NULL);
+
+    gst_adaptive_demux2_stream_on_output_space_available (stream);
+  }
+
+  return res;
+}
+
+void
+gst_adaptive_demux_track_drain_to (GstAdaptiveDemuxTrack * track,
+    GstClockTime drain_running_time)
+{
+  GstAdaptiveDemux *demux = track->demux;
+
+  GST_DEBUG_OBJECT (demux,
+      "Track '%s' draining to running time %" GST_STIME_FORMAT,
+      track->stream_id, GST_STIME_ARGS (drain_running_time));
+
+  while (track->next_position == GST_CLOCK_STIME_NONE ||
+      track->next_position < drain_running_time) {
+    TrackQueueItem *item;
+    GstMiniObject *next_mo = NULL;
+
+    /* If we're in a gap, and the end time is after the target running time,
+     * exit */
+    if (track->gap_position != GST_CLOCK_TIME_NONE) {
+      GstClockTimeDiff running_time_end;
+      GstClockTimeDiff gap_end = track->gap_position;
+
+      /* In reverse playback, the start of the gap is the highest
+       * running time, so only add duration for forward play */
+      if (track->output_segment.rate > 0)
+        gap_end += track->gap_duration;
+
+      running_time_end =
+          my_segment_to_running_time (&track->output_segment, gap_end);
+
+      if (running_time_end >= drain_running_time) {
+        GST_DEBUG_OBJECT (demux,
+            "Track '%s' drained to GAP with running time %" GST_STIME_FORMAT,
+            track->stream_id, GST_STIME_ARGS (running_time_end));
+        return;
+      }
+
+      /* Otherwise this gap is complete, so skip it */
+      track->gap_position = GST_CLOCK_TIME_NONE;
+    }
+
+    /* Otherwise check what's enqueued */
+    item = gst_queue_array_peek_head_struct (track->queue);
+    /* track is empty, we're done */
+    if (item == NULL) {
+      GST_DEBUG_OBJECT (demux, "Track '%s' completely drained",
+          track->stream_id);
+      return;
+    }
+
+    /* If the item has a running time, and it's after the drain_running_time
+     * we're done. */
+    if (item->runningtime != GST_CLOCK_STIME_NONE
+        && item->runningtime >= drain_running_time) {
+      GST_DEBUG_OBJECT (demux, "Track '%s' drained to item %" GST_PTR_FORMAT
+          " with running time %" GST_STIME_FORMAT,
+          track->stream_id, item->item, GST_STIME_ARGS (item->runningtime));
+      return;
+    }
+
+    GST_DEBUG_OBJECT (demux, "Track '%s' discarding %" GST_PTR_FORMAT
+        " with running time %" GST_STIME_FORMAT,
+        track->stream_id, item->item, GST_STIME_ARGS (item->runningtime));
+
+    /* Dequeue the item and discard. Sticky events
+     * will be collected by the dequeue function, gaps will be started.
+     * If it's a buffer, mark the track as discont to get the flag set
+     * on the next output buffer */
+    next_mo = track_dequeue_data_locked (demux, track, FALSE);
+    if (GST_IS_BUFFER (next_mo)) {
+      track->output_discont = TRUE;
+    }
+    gst_mini_object_unref (next_mo);
+    gst_adaptive_demux_track_update_next_position (track);
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "Track '%s' drained to running time %" GST_STIME_FORMAT, track->stream_id,
+      GST_STIME_ARGS (track->next_position));
+}
+
+static inline GstClockTimeDiff
+my_segment_to_running_time (GstSegment * segment, GstClockTime val)
+{
+  GstClockTimeDiff res = GST_CLOCK_STIME_NONE;
+
+  if (GST_CLOCK_TIME_IS_VALID (val)) {
+    gboolean sign =
+        gst_segment_to_running_time_full (segment, GST_FORMAT_TIME, val, &val);
+    if (sign > 0)
+      res = val;
+    else if (sign < 0)
+      res = -val;
+  }
+  return res;
+}
+
+/* Queues an item on a track queue and updates the buffering levels
+ * TRACKS_LOCK held */
+static void
+track_queue_data_locked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxTrack * track, GstMiniObject * object, gsize size,
+    GstClockTime timestamp, GstClockTime duration)
+{
+  TrackQueueItem item;
+
+  item.item = object;
+  item.size = size;
+  item.runningtime = GST_CLOCK_STIME_NONE;
+  item.runningtime_end = GST_CLOCK_STIME_NONE;
+
+  if (timestamp != GST_CLOCK_TIME_NONE) {
+    GstClockTimeDiff output_time;
+
+    /* Set the running time of the item */
+    item.runningtime =
+        my_segment_to_running_time (&track->input_segment, timestamp);
+
+    /* Update segment position (include duration if valid) */
+    track->input_segment.position = timestamp;
+    if (GST_CLOCK_TIME_IS_VALID (duration)) {
+      track->input_segment.position += duration;
+      item.runningtime_end =
+          my_segment_to_running_time (&track->input_segment,
+          track->input_segment.position);
+    }
+
+    /* Update track input time and level */
+    track->input_time =
+        my_segment_to_running_time (&track->input_segment,
+        track->input_segment.position);
+
+    output_time = MAX (track->output_time, demux->priv->global_output_position);
+    if (track->input_time >= output_time)
+      track->level_time = track->input_time - output_time;
+    else
+      track->level_time = 0;
+
+    GST_LOG_OBJECT (demux,
+        "track %s input_time:%" GST_STIME_FORMAT " output_time:%"
+        GST_STIME_FORMAT " level:%" GST_TIME_FORMAT,
+        track->stream_id, GST_STIME_ARGS (track->input_time),
+        GST_STIME_ARGS (track->output_time), GST_TIME_ARGS (track->level_time));
+  }
+  track->level_bytes += size;
+  gst_queue_array_push_tail_struct (track->queue, &item);
+
+  /* If we were waiting for this track to add something, notify output thread */
+  /* FIXME: This should be in adaptive demux */
+  if (track->waiting_add) {
+    g_cond_signal (&demux->priv->tracks_add);
+  }
+}
+
+static GstFlowReturn
+_track_sink_chain_function (GstPad * pad, GstObject * parent,
+    GstBuffer * buffer)
+{
+  GstAdaptiveDemuxTrack *track = gst_pad_get_element_private (pad);
+  GstAdaptiveDemux *demux = track->demux;
+  GstClockTime ts;
+
+  GST_DEBUG_OBJECT (pad, "buffer %" GST_PTR_FORMAT, buffer);
+
+  TRACKS_LOCK (demux);
+
+  ts = GST_BUFFER_DTS_OR_PTS (buffer);
+
+  /* Buffers coming out of parsebin *should* always be timestamped (it's the
+   * goal of parsebin after all). The tracks will use that (converted to
+   * running-time) in order to track position and buffering levels.
+   *
+   * Unfortunately there are valid cases where the parsers won't be able to
+   * timestamp all frames (due to the underlying formats or muxing). For those
+   * cases, we use the last incoming timestamp (via the track input GstSegment
+   * position):
+   *
+   * * If buffers were previously received, that segment position will
+   *   correspond to the last timestamped-buffer PTS/DTS
+   *
+   * * If *no* buffers were previously received, the segment position *should*
+   *   correspond to the valid initial position (in buffer timestamps). If not
+   *   set, we need to bail out.
+   */
+  if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+    if (GST_CLOCK_TIME_IS_VALID (track->input_segment.position)) {
+      GST_WARNING_OBJECT (pad,
+          "buffer doesn't have any pts or dts, using segment position (%"
+          GST_TIME_FORMAT ")", GST_TIME_ARGS (track->input_segment.position));
+      ts = track->input_segment.position;
+    } else {
+      GST_ERROR_OBJECT (pad, "initial buffer doesn't have any pts or dts !");
+      gst_buffer_unref (buffer);
+      TRACKS_UNLOCK (demux);
+      return GST_FLOW_ERROR;
+    }
+  }
+
+  if (GST_CLOCK_TIME_IS_VALID (track->input_segment.position) &&
+      ts > track->input_segment.position &&
+      ts > track->input_segment.start &&
+      ts - track->input_segment.position > 100 * GST_MSECOND) {
+    GstClockTime duration = ts - track->input_segment.position;
+    GstEvent *gap = gst_event_new_gap (track->input_segment.position, duration);
+    /* Insert gap event to ensure coherent interleave */
+    GST_DEBUG_OBJECT (pad,
+        "Inserting gap for %" GST_TIME_FORMAT " vs %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (ts), GST_TIME_ARGS (track->input_segment.position));
+    track_queue_data_locked (demux, track, (GstMiniObject *) gap, 0,
+        track->input_segment.position, duration);
+  }
+
+  track_queue_data_locked (demux, track, (GstMiniObject *) buffer,
+      gst_buffer_get_size (buffer), ts, GST_BUFFER_DURATION (buffer));
+
+  /* Recalculate buffering */
+  demux_update_buffering_locked (demux);
+  demux_post_buffering_locked (demux);
+  /* UNLOCK */
+  TRACKS_UNLOCK (demux);
+
+  return GST_FLOW_OK;
+}
+
+static gboolean
+_track_sink_event_function (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstAdaptiveDemuxTrack *track = gst_pad_get_element_private (pad);
+  GstAdaptiveDemux *demux = track->demux;
+  GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+  gboolean drop = FALSE;
+
+  GST_DEBUG_OBJECT (pad, "event %" GST_PTR_FORMAT, event);
+
+  TRACKS_LOCK (demux);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_STREAM_COLLECTION:
+    {
+      /* Replace upstream collection with demux collection */
+      GST_DEBUG_OBJECT (pad, "Dropping stream-collection, we send our own");
+      drop = TRUE;
+      break;
+    }
+    case GST_EVENT_STREAM_START:
+    {
+      GST_DEBUG_OBJECT (pad, "Dropping stream-start, we send our own");
+      if (track->eos) {
+        gint i, len;
+        /* Find and drop latest EOS if present */
+        len = gst_queue_array_get_length (track->queue);
+        for (i = len - 1; i >= 0; i--) {
+          TrackQueueItem *item =
+              gst_queue_array_peek_nth_struct (track->queue, i);
+          if (GST_IS_EVENT (item->item)
+              && GST_EVENT_TYPE (item->item) == GST_EVENT_EOS) {
+            TrackQueueItem sub;
+            GST_DEBUG_OBJECT (pad, "Removing previously received EOS (pos:%d)",
+                i);
+            if (gst_queue_array_drop_struct (track->queue, i, &sub))
+              gst_mini_object_unref (sub.item);
+            break;
+          }
+        }
+        track->eos = FALSE;
+      }
+      drop = TRUE;
+      break;
+    }
+    case GST_EVENT_EOS:
+    {
+      if (track->pending_srcpad != NULL) {
+        GST_DEBUG_OBJECT (pad,
+            "Dropping EOS because we have a pending pad switch");
+        drop = TRUE;
+      } else {
+        track->eos = TRUE;
+      }
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+    case GST_EVENT_FLUSH_START:
+    {
+      /* Drop flush events */
+      drop = TRUE;
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (drop || !GST_EVENT_IS_SERIALIZED (event)) {
+    GST_DEBUG_OBJECT (pad, "dropping event %s", GST_EVENT_TYPE_NAME (event));
+    gst_event_unref (event);
+    TRACKS_UNLOCK (demux);
+    /* Silently "accept" them */
+    return TRUE;
+  }
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      guint64 seg_seqnum = gst_event_get_seqnum (event);
+
+      if (track->input_segment_seqnum == seg_seqnum) {
+        GST_DEBUG_OBJECT (pad, "Ignoring duplicate segment");
+        gst_event_unref (event);
+        TRACKS_UNLOCK (demux);
+
+        return TRUE;
+      }
+
+      track->input_segment_seqnum = seg_seqnum;
+      gst_event_copy_segment (event, &track->input_segment);
+      if (track->input_segment.rate >= 0)
+        track->input_segment.position = track->input_segment.start;
+      else
+        track->input_segment.position = track->input_segment.stop;
+      GST_DEBUG_OBJECT (pad, "track %s stored segment %" GST_SEGMENT_FORMAT,
+          track->stream_id, &track->input_segment);
+      timestamp = track->input_segment.position;
+
+      break;
+    }
+    case GST_EVENT_GAP:
+    {
+      gst_event_parse_gap (event, &timestamp, &duration);
+
+      if (!GST_CLOCK_TIME_IS_VALID (timestamp)) {
+        GST_DEBUG_OBJECT (pad, "Dropping gap event with invalid timestamp");
+        goto drop_ok;
+      }
+
+      break;
+    }
+    default:
+      break;
+  }
+
+  track_queue_data_locked (demux, track, (GstMiniObject *) event, 0,
+      timestamp, duration);
+
+  /* Recalculate buffering */
+  demux_update_buffering_locked (demux);
+  demux_post_buffering_locked (demux);
+
+  TRACKS_UNLOCK (demux);
+
+  return TRUE;
+
+  /* errors */
+drop_ok:
+  {
+    gst_event_unref (event);
+    TRACKS_UNLOCK (demux);
+    return TRUE;
+  }
+}
+
+static void
+track_sinkpad_unlinked_cb (GstPad * sinkpad, GstPad * parsebin_srcpad,
+    GstAdaptiveDemuxTrack * track)
+{
+  GST_DEBUG_OBJECT (sinkpad, "Got unlinked from %s:%s",
+      GST_DEBUG_PAD_NAME (parsebin_srcpad));
+
+  if (track->pending_srcpad) {
+    GST_DEBUG_OBJECT (sinkpad, "linking to pending pad %s:%s",
+        GST_DEBUG_PAD_NAME (track->pending_srcpad));
+
+    if (gst_pad_link (track->pending_srcpad, sinkpad) != GST_PAD_LINK_OK) {
+      GST_ERROR_OBJECT (sinkpad, "could not link pending pad !");
+    }
+    gst_object_unref (track->pending_srcpad);
+    track->pending_srcpad = NULL;
+  }
+}
+
+/* TRACKS_LOCK held
+ * Call this to update the track next_position with timed data  */
+void
+gst_adaptive_demux_track_update_next_position (GstAdaptiveDemuxTrack * track)
+{
+  guint i, len;
+
+  /* If filling a gap, the next position is the gap position */
+  if (track->gap_position != GST_CLOCK_TIME_NONE) {
+    track->next_position =
+        my_segment_to_running_time (&track->output_segment,
+        track->gap_position);
+    return;
+  }
+
+  len = gst_queue_array_get_length (track->queue);
+  for (i = 0; i < len; i++) {
+    TrackQueueItem *item = gst_queue_array_peek_nth_struct (track->queue, i);
+
+    if (item->runningtime != GST_CLOCK_STIME_NONE) {
+      GST_DEBUG_OBJECT (track->demux,
+          "Track '%s' next position %" GST_STIME_FORMAT, track->stream_id,
+          GST_STIME_ARGS (item->runningtime));
+      track->next_position = item->runningtime;
+      return;
+    }
+  }
+  track->next_position = GST_CLOCK_STIME_NONE;
+
+  GST_DEBUG_OBJECT (track->demux,
+      "Track '%s' doesn't have any pending timed data", track->stream_id);
+}
+
+static void
+_demux_track_free (GstAdaptiveDemuxTrack * track)
+{
+  GST_DEBUG_OBJECT (track->demux, "freeing track %p '%s'", track,
+      track->stream_id);
+
+  g_free (track->stream_id);
+  g_free (track->upstream_stream_id);
+
+  if (track->pending_srcpad)
+    gst_object_unref (track->pending_srcpad);
+
+  if (track->generic_caps)
+    gst_caps_unref (track->generic_caps);
+  gst_object_unref (track->stream_object);
+  if (track->tags)
+    gst_tag_list_unref (track->tags);
+  gst_queue_array_free (track->queue);
+
+  gst_event_store_deinit (&track->sticky_events);
+
+  if (track->element != NULL) {
+    gst_element_set_state (track->element, GST_STATE_NULL);
+    gst_bin_remove (GST_BIN_CAST (track->demux), track->element);
+  }
+
+  g_free (track);
+}
+
+GstAdaptiveDemuxTrack *
+gst_adaptive_demux_track_ref (GstAdaptiveDemuxTrack * track)
+{
+  g_return_val_if_fail (track != NULL, NULL);
+  GST_TRACE ("%p %d -> %d", track, track->ref_count, track->ref_count + 1);
+  g_atomic_int_inc (&track->ref_count);
+
+  return track;
+}
+
+void
+gst_adaptive_demux_track_unref (GstAdaptiveDemuxTrack * track)
+{
+  g_return_if_fail (track != NULL);
+
+  GST_TRACE ("%p %d -> %d", track, track->ref_count, track->ref_count - 1);
+  if (g_atomic_int_dec_and_test (&track->ref_count)) {
+    _demux_track_free (track);
+  }
+}
+
+static void
+_track_queue_item_clear (TrackQueueItem * item)
+{
+  if (item->item) {
+    gst_mini_object_unref ((GstMiniObject *) item->item);
+    item->item = NULL;
+  }
+}
+
+/* Internal function which actually adds the elements to the demuxer */
+gboolean
+gst_adaptive_demux_track_add_elements (GstAdaptiveDemuxTrack * track,
+    guint period_num)
+{
+  GstAdaptiveDemux *demux = track->demux;
+  gchar *internal_name;
+  guint i, len;
+
+  internal_name =
+      g_strdup_printf ("track-period%d-%s", period_num, track->stream_id);
+  len = strlen (internal_name);
+  for (i = 0; i < len; i++)
+    if (internal_name[i] == ' ')
+      internal_name[i] = '_';
+  track->element = gst_bin_new (internal_name);
+  g_free (internal_name);
+
+  internal_name =
+      g_strdup_printf ("track-period%d-sink-%s", period_num, track->stream_id);
+  len = strlen (internal_name);
+  for (i = 0; i < len; i++)
+    if (internal_name[i] == ' ')
+      internal_name[i] = '_';
+  track->sinkpad = gst_pad_new (internal_name, GST_PAD_SINK);
+  g_signal_connect (track->sinkpad, "unlinked",
+      (GCallback) track_sinkpad_unlinked_cb, track);
+  g_free (internal_name);
+  gst_element_add_pad (GST_ELEMENT_CAST (track->element), track->sinkpad);
+  gst_pad_set_element_private (track->sinkpad, track);
+  gst_pad_set_chain_function (track->sinkpad, _track_sink_chain_function);
+  gst_pad_set_event_function (track->sinkpad, _track_sink_event_function);
+  gst_pad_set_query_function (track->sinkpad, _track_sink_query_function);
+
+  if (!gst_bin_add (GST_BIN_CAST (demux), track->element)) {
+    track->element = NULL;
+    return FALSE;
+  }
+
+  gst_element_sync_state_with_parent (track->element);
+  return TRUE;
+}
+
+/**
+ * gst_adaptive_demux_track_new:
+ * @demux: a #GstAdaptiveDemux
+ * @type: a #GstStreamType
+ * @flags: a #GstStreamFlags
+ * @stream_id: (transfer none): The stream id for the new track
+ * @caps: (transfer full): The caps for the track
+ * @tags: (allow-none) (transfer full): The tags for the track
+ *
+ * Create and register a new #GstAdaptiveDemuxTrack
+ *
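+ * A minimal usage sketch (illustrative only; the stream-id, caps and type
+ * values below are placeholders rather than values taken from a subclass):
+ * |[
+ *   GstCaps *caps = gst_caps_new_empty_simple ("video/x-h264");
+ *   GstAdaptiveDemuxTrack *track =
+ *       gst_adaptive_demux_track_new (demux, GST_STREAM_TYPE_VIDEO,
+ *       GST_STREAM_FLAG_NONE, "video-0", caps, NULL);
+ * ]|
+ *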
+ * Returns: (transfer none): The new track
+ */
+GstAdaptiveDemuxTrack *
+gst_adaptive_demux_track_new (GstAdaptiveDemux * demux,
+    GstStreamType type,
+    GstStreamFlags flags, gchar * stream_id, GstCaps * caps, GstTagList * tags)
+{
+  GstAdaptiveDemuxTrack *track;
+
+  g_return_val_if_fail (stream_id != NULL, NULL);
+  g_return_val_if_fail (type && type != GST_STREAM_TYPE_UNKNOWN, NULL);
+
+  GST_DEBUG_OBJECT (demux, "type:%s stream_id:%s caps:%" GST_PTR_FORMAT,
+      gst_stream_type_get_name (type), stream_id, caps);
+
+  track = g_new0 (GstAdaptiveDemuxTrack, 1);
+  g_atomic_int_set (&track->ref_count, 1);
+  track->demux = demux;
+  track->type = type;
+  track->flags = flags;
+  track->stream_id = g_strdup (stream_id);
+  track->generic_caps = caps;
+  track->stream_object = gst_stream_new (stream_id, caps, type, flags);
+  if (tags) {
+    track->tags = gst_tag_list_ref (tags);
+    gst_stream_set_tags (track->stream_object, tags);
+  }
+
+  track->selected = FALSE;
+  track->active = FALSE;
+  track->draining = FALSE;
+
+  track->queue = gst_queue_array_new_for_struct (sizeof (TrackQueueItem), 50);
+  gst_queue_array_set_clear_func (track->queue,
+      (GDestroyNotify) _track_queue_item_clear);
+
+  gst_event_store_init (&track->sticky_events);
+
+  track->waiting_add = TRUE;
+  track->waiting_del_level = 0;
+
+  /* We have no fragment duration yet, so the buffering threshold is just the
+   * low watermark in time for now */
+  GST_OBJECT_LOCK (demux);
+  track->buffering_threshold = demux->buffering_low_watermark_time;
+  GST_OBJECT_UNLOCK (demux);
+
+  gst_segment_init (&track->input_segment, GST_FORMAT_TIME);
+  track->input_time = 0;
+  track->input_segment_seqnum = GST_SEQNUM_INVALID;
+
+  gst_segment_init (&track->output_segment, GST_FORMAT_TIME);
+  track->gap_position = track->gap_duration = GST_CLOCK_TIME_NONE;
+
+  track->output_time = 0;
+  track->next_position = GST_CLOCK_STIME_NONE;
+
+  track->update_next_segment = FALSE;
+
+  track->level_bytes = 0;
+  track->level_time = 0;
+
+  return track;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.c
new file mode 100644 (file)
index 0000000..a805012
--- /dev/null
@@ -0,0 +1,4217 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:plugin-adaptivedemux2
+ * @short_description: Next Generation adaptive demuxers
+ *
+ * What is an adaptive demuxer?  Adaptive demuxers are special demuxers in the
+ * sense that they don't actually demux data received from upstream but download
+ * the data themselves.
+ *
+ * Adaptive formats (HLS, DASH, MSS) are composed of a manifest file and a set
+ * of fragments. The manifest describes the available media and the sequence of
+ * fragments to use. Each fragment contains a small part of the media (typically
+ * only a few seconds). It is possible for the manifest to have the same media
+ * available in different configurations (bitrates for example) so that the
+ * client can select the one that best suits its scenario (network fluctuation,
+ * hardware requirements...).
+ *
+ * Furthermore, that manifest can also specify alternative media (such as audio
+ * or subtitle tracks in different languages). Only the fragments for the
+ * requested selection will be downloaded.
+ *
+ * These elements can therefore "adapt" themselves to the network conditions (as
+ * opposed to the server doing that adaptation) and user choices, which is why
+ * they are called "adaptive" demuxers.
+ *
+ * Note: These elements require a "streams-aware" container to work
+ * (i.e. urisourcebin, decodebin3, playbin3, or any bin/pipeline with the
+ * GST_BIN_FLAG_STREAMS_AWARE flag set).
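+ *
+ * For example, a minimal sketch of reaching these demuxers through a
+ * streams-aware pipeline such as playbin3 (the URI below is only a
+ * placeholder):
+ * |[
+ *   GstElement *pipeline = gst_element_factory_make ("playbin3", NULL);
+ *   g_object_set (pipeline, "uri", "https://example.com/manifest.m3u8", NULL);
+ *   gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ * ]|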
+ *
+ * Subclasses:
+ * While GstAdaptiveDemux is responsible for the workflow, it knows nothing
+ * about the intrinsics of the subclass formats, so the subclasses are
+ * responsible for maintaining the manifest data structures and stream
+ * information.
+ *
+ * Since: 1.22
+ */
+
+/*
+See the adaptive-demuxer.md design documentation for more information
+
+MT safety.
+The following rules were observed while implementing MT safety in adaptive demux:
+1. If a variable is accessed from multiple threads and at least one thread
+writes to it, then all accesses need to be done from inside a critical section.
+2. If thread A wants to join thread B then at the moment it calls gst_task_join
+it must not hold any mutexes that thread B might take.
+
+The adaptive demux API can be called from several threads. Moreover, adaptive demux
+starts some threads to monitor the download of fragments. In order to protect
+accesses to shared variables (demux and streams) all the API functions that
+can be run in different threads will need to get a mutex (manifest_lock)
+when they start and release it when they end. Because some of those functions
+can indirectly call other API functions (e.g. they can generate events or messages
+that are processed in the same thread) the manifest_lock must be recursive.
+
+The manifest_lock will serialize the public API making access to shared
+variables safe. But some of these functions will try at some moment to join
+threads created by adaptive demux, or to change the state of src elements
+(which will block trying to join the src element streaming thread). Because
+of rule 2, those functions will need to release the manifest_lock during the
+call of gst_task_join. During this time they can be interrupted by other API calls.
+For example, during the processing of a seek event, gst_adaptive_demux_stop_tasks
+is called and this will join all threads. In order to prevent interruptions
+during such a period, all the API functions will also use a second lock: api_lock.
+This will be taken at the beginning of the function and released at the end,
+but this time this lock will not be temporarily released during join.
+This lock will be used only by API calls (not by the SCHEDULER task)
+so it is safe to hold it while joining the threads or changing the src element state. The
+api_lock will serialize all external requests to adaptive demux. In order to
+avoid deadlocks, if a function needs to acquire both manifest and api locks,
+the api_lock will be taken first and the manifest_lock second.
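+
+For example, an external API entry point conceptually does the following
+(illustrative sketch only; GST_API_LOCK is defined further down in this file,
+and GST_MANIFEST_LOCK is assumed to be its companion macro from the headers):
+
+  GST_API_LOCK (demux);         - taken first, serializes external requests
+  GST_MANIFEST_LOCK (demux);    - taken second, protects shared state
+  ... work; the manifest_lock may be dropped temporarily around joins ...
+  GST_MANIFEST_UNLOCK (demux);
+  GST_API_UNLOCK (demux);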
+
+By using the api_lock a thread is protected against other API calls.
+*/
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstadaptivedemux.h"
+#include "gstadaptivedemux-private.h"
+
+#include "gst/gst-i18n-plugin.h"
+#include <gst/base/gstadapter.h>
+#include <gst/app/gstappsrc.h>
+
+GST_DEBUG_CATEGORY (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+#define DEFAULT_FAILED_COUNT 3
+#define DEFAULT_CONNECTION_BITRATE 0
+#define DEFAULT_BANDWIDTH_TARGET_RATIO 0.8f
+
+#define DEFAULT_MIN_BITRATE 0
+#define DEFAULT_MAX_BITRATE 0
+
+#define DEFAULT_MAX_BUFFERING_TIME (30 *  GST_SECOND)
+
+#define DEFAULT_BUFFERING_HIGH_WATERMARK_TIME (30 * GST_SECOND)
+#define DEFAULT_BUFFERING_LOW_WATERMARK_TIME (3 * GST_SECOND)
+#define DEFAULT_BUFFERING_HIGH_WATERMARK_FRAGMENTS 0.0
+#define DEFAULT_BUFFERING_LOW_WATERMARK_FRAGMENTS 0.0
+
+#define DEFAULT_CURRENT_LEVEL_TIME_VIDEO 0
+#define DEFAULT_CURRENT_LEVEL_TIME_AUDIO 0
+
+#define GST_API_GET_LOCK(d) (&(GST_ADAPTIVE_DEMUX_CAST(d)->priv->api_lock))
+#define GST_API_LOCK(d)   g_mutex_lock (GST_API_GET_LOCK (d));
+#define GST_API_UNLOCK(d) g_mutex_unlock (GST_API_GET_LOCK (d));
+
+enum
+{
+  PROP_0,
+  PROP_CONNECTION_SPEED,
+  PROP_BANDWIDTH_TARGET_RATIO,
+  PROP_CONNECTION_BITRATE,
+  PROP_MIN_BITRATE,
+  PROP_MAX_BITRATE,
+  PROP_CURRENT_BANDWIDTH,
+  PROP_MAX_BUFFERING_TIME,
+  PROP_BUFFERING_HIGH_WATERMARK_TIME,
+  PROP_BUFFERING_LOW_WATERMARK_TIME,
+  PROP_BUFFERING_HIGH_WATERMARK_FRAGMENTS,
+  PROP_BUFFERING_LOW_WATERMARK_FRAGMENTS,
+  PROP_CURRENT_LEVEL_TIME_VIDEO,
+  PROP_CURRENT_LEVEL_TIME_AUDIO,
+  PROP_LAST
+};
+
+static GstStaticPadTemplate gst_adaptive_demux_videosrc_template =
+GST_STATIC_PAD_TEMPLATE ("video_%02u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_adaptive_demux_audiosrc_template =
+GST_STATIC_PAD_TEMPLATE ("audio_%02u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_adaptive_demux_subtitlesrc_template =
+GST_STATIC_PAD_TEMPLATE ("subtitle_%02u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+/* Private structure for a track being outputted */
+typedef struct _OutputSlot
+{
+  /* Output pad */
+  GstPad *pad;
+
+  /* Last flow return */
+  GstFlowReturn flow_ret;
+
+  /* Stream Type */
+  GstStreamType type;
+
+  /* Target track (reference) */
+  GstAdaptiveDemuxTrack *track;
+
+  /* Pending track (which will replace track) */
+  GstAdaptiveDemuxTrack *pending_track;
+
+  /* TRUE if a buffer or a gap event was pushed through this slot. */
+  gboolean pushed_timed_data;
+} OutputSlot;
+
+static GstBinClass *parent_class = NULL;
+static gint private_offset = 0;
+
+static void gst_adaptive_demux_class_init (GstAdaptiveDemuxClass * klass);
+static void gst_adaptive_demux_init (GstAdaptiveDemux * dec,
+    GstAdaptiveDemuxClass * klass);
+static void gst_adaptive_demux_finalize (GObject * object);
+static GstStateChangeReturn gst_adaptive_demux_change_state (GstElement *
+    element, GstStateChange transition);
+static gboolean gst_adaptive_demux_query (GstElement * element,
+    GstQuery * query);
+
+static void gst_adaptive_demux_handle_message (GstBin * bin, GstMessage * msg);
+
+static gboolean gst_adaptive_demux_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event);
+static GstFlowReturn gst_adaptive_demux_sink_chain (GstPad * pad,
+    GstObject * parent, GstBuffer * buffer);
+static gboolean gst_adaptive_demux_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query);
+static gboolean gst_adaptive_demux_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event);
+
+static gboolean
+gst_adaptive_demux_push_src_event (GstAdaptiveDemux * demux, GstEvent * event);
+
+static void gst_adaptive_demux_output_loop (GstAdaptiveDemux * demux);
+static void gst_adaptive_demux_reset (GstAdaptiveDemux * demux);
+static gboolean gst_adaptive_demux_prepare_streams (GstAdaptiveDemux * demux,
+    gboolean first_and_live);
+
+static gboolean gst_adaptive_demux2_stream_select_bitrate (GstAdaptiveDemux *
+    demux, GstAdaptiveDemux2Stream * stream, guint64 bitrate);
+static GstFlowReturn
+gst_adaptive_demux_update_manifest_default (GstAdaptiveDemux * demux);
+
+static void gst_adaptive_demux_stop_manifest_update_task (GstAdaptiveDemux *
+    demux);
+static void gst_adaptive_demux_start_manifest_update_task (GstAdaptiveDemux *
+    demux);
+
+static void gst_adaptive_demux_start_tasks (GstAdaptiveDemux * demux);
+static void gst_adaptive_demux_stop_tasks (GstAdaptiveDemux * demux,
+    gboolean stop_updates);
+static GstFlowReturn
+gst_adaptive_demux2_stream_data_received_default (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer);
+static GstFlowReturn
+gst_adaptive_demux2_stream_finish_fragment_default (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static GstFlowReturn
+gst_adaptive_demux2_stream_advance_fragment_unlocked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstClockTime duration);
+
+static void
+gst_adaptive_demux2_stream_update_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+
+static gboolean
+gst_adaptive_demux_requires_periodical_playlist_update_default (GstAdaptiveDemux
+    * demux);
+
+/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
+ * method to get to the padtemplates */
+GType
+gst_adaptive_demux_ng_get_type (void)
+{
+  static gsize type = 0;
+
+  if (g_once_init_enter (&type)) {
+    GType _type;
+    static const GTypeInfo info = {
+      sizeof (GstAdaptiveDemuxClass),
+      NULL,
+      NULL,
+      (GClassInitFunc) gst_adaptive_demux_class_init,
+      NULL,
+      NULL,
+      sizeof (GstAdaptiveDemux),
+      0,
+      (GInstanceInitFunc) gst_adaptive_demux_init,
+    };
+
+    _type = g_type_register_static (GST_TYPE_BIN,
+        "GstAdaptiveDemux2", &info, G_TYPE_FLAG_ABSTRACT);
+
+    private_offset =
+        g_type_add_instance_private (_type, sizeof (GstAdaptiveDemuxPrivate));
+
+    g_once_init_leave (&type, _type);
+  }
+  return type;
+}
+
+static inline GstAdaptiveDemuxPrivate *
+gst_adaptive_demux_get_instance_private (GstAdaptiveDemux * self)
+{
+  return (G_STRUCT_MEMBER_P (self, private_offset));
+}
+
+static void
+gst_adaptive_demux_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX (object);
+
+  GST_OBJECT_LOCK (demux);
+
+  switch (prop_id) {
+    case PROP_CONNECTION_SPEED:
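+      /* The property is expressed in kbps but stored internally in bits per
+       * second */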
+      demux->connection_speed = g_value_get_uint (value) * 1000;
+      GST_DEBUG_OBJECT (demux, "Connection speed set to %u",
+          demux->connection_speed);
+      break;
+    case PROP_BANDWIDTH_TARGET_RATIO:
+      demux->bandwidth_target_ratio = g_value_get_float (value);
+      break;
+    case PROP_MIN_BITRATE:
+      demux->min_bitrate = g_value_get_uint (value);
+      break;
+    case PROP_MAX_BITRATE:
+      demux->max_bitrate = g_value_get_uint (value);
+      break;
+    case PROP_CONNECTION_BITRATE:
+      demux->connection_speed = g_value_get_uint (value);
+      break;
+      /* FIXME: Recalculate track and buffering levels
+       * when watermarks change? */
+    case PROP_MAX_BUFFERING_TIME:
+      demux->max_buffering_time = g_value_get_uint64 (value);
+      break;
+    case PROP_BUFFERING_HIGH_WATERMARK_TIME:
+      demux->buffering_high_watermark_time = g_value_get_uint64 (value);
+      break;
+    case PROP_BUFFERING_LOW_WATERMARK_TIME:
+      demux->buffering_low_watermark_time = g_value_get_uint64 (value);
+      break;
+    case PROP_BUFFERING_HIGH_WATERMARK_FRAGMENTS:
+      demux->buffering_high_watermark_fragments = g_value_get_double (value);
+      break;
+    case PROP_BUFFERING_LOW_WATERMARK_FRAGMENTS:
+      demux->buffering_low_watermark_fragments = g_value_get_double (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+
+  GST_OBJECT_UNLOCK (demux);
+}
+
+static void
+gst_adaptive_demux_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX (object);
+
+  GST_OBJECT_LOCK (demux);
+
+  switch (prop_id) {
+    case PROP_CONNECTION_SPEED:
+      g_value_set_uint (value, demux->connection_speed / 1000);
+      break;
+    case PROP_BANDWIDTH_TARGET_RATIO:
+      g_value_set_float (value, demux->bandwidth_target_ratio);
+      break;
+    case PROP_MIN_BITRATE:
+      g_value_set_uint (value, demux->min_bitrate);
+      break;
+    case PROP_MAX_BITRATE:
+      g_value_set_uint (value, demux->max_bitrate);
+      break;
+    case PROP_CONNECTION_BITRATE:
+      g_value_set_uint (value, demux->connection_speed);
+      break;
+    case PROP_CURRENT_BANDWIDTH:
+      g_value_set_uint (value, demux->current_download_rate);
+      break;
+    case PROP_MAX_BUFFERING_TIME:
+      g_value_set_uint64 (value, demux->max_buffering_time);
+      break;
+    case PROP_BUFFERING_HIGH_WATERMARK_TIME:
+      g_value_set_uint64 (value, demux->buffering_high_watermark_time);
+      break;
+    case PROP_BUFFERING_LOW_WATERMARK_TIME:
+      g_value_set_uint64 (value, demux->buffering_low_watermark_time);
+      break;
+    case PROP_BUFFERING_HIGH_WATERMARK_FRAGMENTS:
+      g_value_set_double (value, demux->buffering_high_watermark_fragments);
+      break;
+    case PROP_BUFFERING_LOW_WATERMARK_FRAGMENTS:
+      g_value_set_double (value, demux->buffering_low_watermark_fragments);
+      break;
+    case PROP_CURRENT_LEVEL_TIME_VIDEO:
+      g_value_set_uint64 (value, demux->current_level_time_video);
+      break;
+    case PROP_CURRENT_LEVEL_TIME_AUDIO:
+      g_value_set_uint64 (value, demux->current_level_time_audio);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+
+  GST_OBJECT_UNLOCK (demux);
+}
+
+static void
+gst_adaptive_demux_class_init (GstAdaptiveDemuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstBinClass *gstbin_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  gstelement_class = GST_ELEMENT_CLASS (klass);
+  gstbin_class = GST_BIN_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (adaptivedemux2_debug, "adaptivedemux2", 0,
+      "Base Adaptive Demux (ng)");
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  if (private_offset != 0)
+    g_type_class_adjust_private_offset (klass, &private_offset);
+
+  gobject_class->set_property = gst_adaptive_demux_set_property;
+  gobject_class->get_property = gst_adaptive_demux_get_property;
+  gobject_class->finalize = gst_adaptive_demux_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
+      g_param_spec_uint ("connection-speed", "Connection Speed",
+          "Network connection speed to use in kbps (0 = calculate from downloaded"
+          " fragments)", 0, G_MAXUINT / 1000, DEFAULT_CONNECTION_BITRATE / 1000,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_BANDWIDTH_TARGET_RATIO,
+      g_param_spec_float ("bandwidth-target-ratio",
+          "Ratio of target bandwidth / available bandwidth",
+          "Limit of the available bitrate to use when switching to alternates",
+          0, 1, DEFAULT_BANDWIDTH_TARGET_RATIO,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_CONNECTION_BITRATE,
+      g_param_spec_uint ("connection-bitrate", "Connection Speed (bits/s)",
+          "Network connection speed to use (0 = automatic) (bits/s)",
+          0, G_MAXUINT, DEFAULT_CONNECTION_BITRATE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MIN_BITRATE,
+      g_param_spec_uint ("min-bitrate", "Minimum Bitrate",
+          "Minimum bitrate to use when switching to alternates (bits/s)",
+          0, G_MAXUINT, DEFAULT_MIN_BITRATE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_BITRATE,
+      g_param_spec_uint ("max-bitrate", "Maximum Bitrate",
+          "Maximum bitrate to use when switching to alternates (bits/s)",
+          0, G_MAXUINT, DEFAULT_MAX_BITRATE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_CURRENT_BANDWIDTH,
+      g_param_spec_uint ("current-bandwidth",
+          "Current download bandwidth (bits/s)",
+          "Report of current download bandwidth (based on arriving data) (bits/s)",
+          0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_BUFFERING_TIME,
+      g_param_spec_uint64 ("max-buffering-time",
+          "Buffering maximum size (ns)",
+          "Upper limit on the high watermark for parsed data, above which downloads are paused (in ns, 0=disable)",
+          0, G_MAXUINT64, DEFAULT_MAX_BUFFERING_TIME,
+          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class,
+      PROP_BUFFERING_HIGH_WATERMARK_TIME,
+      g_param_spec_uint64 ("high-watermark-time",
+          "High buffering watermark size (ns)",
+          "High watermark for parsed data above which downloads are paused (in ns, 0=disable)",
+          0, G_MAXUINT64, DEFAULT_BUFFERING_HIGH_WATERMARK_TIME,
+          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class,
+      PROP_BUFFERING_LOW_WATERMARK_TIME,
+      g_param_spec_uint64 ("low-watermark-time",
+          "Low buffering watermark size (ns)",
+          "Low watermark for parsed data below which downloads are resumed (in ns, 0=disable)",
+          0, G_MAXUINT64, DEFAULT_BUFFERING_LOW_WATERMARK_TIME,
+          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class,
+      PROP_BUFFERING_HIGH_WATERMARK_FRAGMENTS,
+      g_param_spec_double ("high-watermark-fragments",
+          "High buffering watermark size (fragments)",
+          "High watermark for parsed data above which downloads are paused (in fragments, 0=disable)",
+          0, G_MAXFLOAT, DEFAULT_BUFFERING_HIGH_WATERMARK_FRAGMENTS,
+          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class,
+      PROP_BUFFERING_LOW_WATERMARK_FRAGMENTS,
+      g_param_spec_double ("low-watermark-fragments",
+          "Low buffering watermark size (fragments)",
+          "Low watermark for parsed data below which downloads are resumed (in fragments, 0=disable)",
+          0, G_MAXFLOAT, DEFAULT_BUFFERING_LOW_WATERMARK_FRAGMENTS,
+          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_CURRENT_LEVEL_TIME_VIDEO,
+      g_param_spec_uint64 ("current-level-time-video",
+          "Currently buffered level of video (ns)",
+          "Currently buffered level of video track(s) (ns)",
+          0, G_MAXUINT64, DEFAULT_CURRENT_LEVEL_TIME_VIDEO,
+          G_PARAM_READABLE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_CURRENT_LEVEL_TIME_AUDIO,
+      g_param_spec_uint64 ("current-level-time-audio",
+          "Currently buffered level of audio (ns)",
+          "Currently buffered level of audio track(s) (ns)",
+          0, G_MAXUINT64, DEFAULT_CURRENT_LEVEL_TIME_AUDIO,
+          G_PARAM_READABLE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_adaptive_demux_audiosrc_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_adaptive_demux_videosrc_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_adaptive_demux_subtitlesrc_template);
+
+
+  gstelement_class->change_state = gst_adaptive_demux_change_state;
+  gstelement_class->query = gst_adaptive_demux_query;
+
+  gstbin_class->handle_message = gst_adaptive_demux_handle_message;
+
+  klass->data_received = gst_adaptive_demux2_stream_data_received_default;
+  klass->finish_fragment = gst_adaptive_demux2_stream_finish_fragment_default;
+  klass->update_manifest = gst_adaptive_demux_update_manifest_default;
+  klass->requires_periodical_playlist_update =
+      gst_adaptive_demux_requires_periodical_playlist_update_default;
+  klass->stream_update_tracks = gst_adaptive_demux2_stream_update_tracks;
+  gst_type_mark_as_plugin_api (GST_TYPE_ADAPTIVE_DEMUX, 0);
+}
+
+static void
+gst_adaptive_demux_init (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxClass * klass)
+{
+  GstPadTemplate *pad_template;
+
+  GST_DEBUG_OBJECT (demux, "gst_adaptive_demux_init");
+
+  demux->priv = gst_adaptive_demux_get_instance_private (demux);
+  demux->priv->input_adapter = gst_adapter_new ();
+  demux->realtime_clock = gst_adaptive_demux_clock_new ();
+
+  demux->download_helper = downloadhelper_new (demux->realtime_clock);
+  demux->priv->segment_seqnum = gst_util_seqnum_next ();
+  demux->have_group_id = FALSE;
+  demux->group_id = G_MAXUINT;
+
+  gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+  demux->instant_rate_multiplier = 1.0;
+
+  gst_bin_set_suppressed_flags (GST_BIN_CAST (demux),
+      GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+
+  g_rec_mutex_init (&demux->priv->manifest_lock);
+
+  demux->priv->scheduler_task = gst_adaptive_demux_loop_new ();
+  g_mutex_init (&demux->priv->scheduler_lock);
+
+  g_mutex_init (&demux->priv->api_lock);
+  g_mutex_init (&demux->priv->segment_lock);
+
+  g_mutex_init (&demux->priv->tracks_lock);
+  g_cond_init (&demux->priv->tracks_add);
+
+  g_mutex_init (&demux->priv->buffering_lock);
+
+  demux->priv->periods = g_queue_new ();
+
+  pad_template =
+      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
+  g_return_if_fail (pad_template != NULL);
+
+  demux->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+  gst_pad_set_event_function (demux->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_adaptive_demux_sink_event));
+  gst_pad_set_chain_function (demux->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_adaptive_demux_sink_chain));
+
+  /* Properties */
+  demux->bandwidth_target_ratio = DEFAULT_BANDWIDTH_TARGET_RATIO;
+  demux->connection_speed = DEFAULT_CONNECTION_BITRATE;
+  demux->min_bitrate = DEFAULT_MIN_BITRATE;
+  demux->max_bitrate = DEFAULT_MAX_BITRATE;
+
+  demux->max_buffering_time = DEFAULT_MAX_BUFFERING_TIME;
+  demux->buffering_high_watermark_time = DEFAULT_BUFFERING_HIGH_WATERMARK_TIME;
+  demux->buffering_low_watermark_time = DEFAULT_BUFFERING_LOW_WATERMARK_TIME;
+  demux->buffering_high_watermark_fragments =
+      DEFAULT_BUFFERING_HIGH_WATERMARK_FRAGMENTS;
+  demux->buffering_low_watermark_fragments =
+      DEFAULT_BUFFERING_LOW_WATERMARK_FRAGMENTS;
+
+  demux->current_level_time_video = DEFAULT_CURRENT_LEVEL_TIME_VIDEO;
+  demux->current_level_time_audio = DEFAULT_CURRENT_LEVEL_TIME_AUDIO;
+
+  gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+
+  demux->priv->duration = GST_CLOCK_TIME_NONE;
+
+  /* Output combiner */
+  demux->priv->flowcombiner = gst_flow_combiner_new ();
+
+  /* Output task */
+  g_rec_mutex_init (&demux->priv->output_lock);
+  demux->priv->output_task =
+      gst_task_new ((GstTaskFunction) gst_adaptive_demux_output_loop, demux,
+      NULL);
+  gst_task_set_lock (demux->priv->output_task, &demux->priv->output_lock);
+}
+
+static void
+gst_adaptive_demux_finalize (GObject * object)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (object);
+  GstAdaptiveDemuxPrivate *priv = demux->priv;
+
+  GST_DEBUG_OBJECT (object, "finalize");
+
+  g_object_unref (priv->input_adapter);
+
+  downloadhelper_free (demux->download_helper);
+
+  g_rec_mutex_clear (&demux->priv->manifest_lock);
+  g_mutex_clear (&demux->priv->api_lock);
+  g_mutex_clear (&demux->priv->segment_lock);
+
+  g_mutex_clear (&demux->priv->buffering_lock);
+
+  g_mutex_clear (&demux->priv->scheduler_lock);
+  gst_adaptive_demux_loop_unref (demux->priv->scheduler_task);
+
+  /* The input period is present after a reset, clear it now */
+  if (demux->input_period)
+    gst_adaptive_demux_period_unref (demux->input_period);
+
+  if (demux->realtime_clock) {
+    gst_adaptive_demux_clock_unref (demux->realtime_clock);
+    demux->realtime_clock = NULL;
+  }
+  g_object_unref (priv->output_task);
+  g_rec_mutex_clear (&priv->output_lock);
+
+  gst_flow_combiner_free (priv->flowcombiner);
+
+  g_queue_free (priv->periods);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_adaptive_demux_check_streams_aware (GstAdaptiveDemux * demux)
+{
+  gboolean ret = FALSE;
+  GstObject *parent = gst_object_get_parent (GST_OBJECT (demux));
+
+  if (parent) {
+    ret = GST_OBJECT_FLAG_IS_SET (parent, GST_BIN_FLAG_STREAMS_AWARE);
+    /* gst_object_get_parent() returned a new reference, drop it */
+    gst_object_unref (parent);
+  }
+
+  return ret;
+}
+
+static GstStateChangeReturn
+gst_adaptive_demux_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (element);
+  GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      if (!gst_adaptive_demux_check_streams_aware (demux)) {
+        GST_ELEMENT_ERROR (demux, CORE, STATE_CHANGE,
+            (_("Element requires a streams-aware context.")), (NULL));
+        goto fail_out;
+      }
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
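+      /* Stop the scheduler loop, downloads and the output task before
+       * resetting the element */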
+      if (g_atomic_int_compare_and_exchange (&demux->running, TRUE, FALSE))
+        GST_DEBUG_OBJECT (demux, "demuxer has stopped running");
+
+      gst_adaptive_demux_loop_stop (demux->priv->scheduler_task, TRUE);
+      downloadhelper_stop (demux->download_helper);
+
+      TRACKS_LOCK (demux);
+      demux->priv->flushing = TRUE;
+      g_cond_signal (&demux->priv->tracks_add);
+      gst_task_stop (demux->priv->output_task);
+      TRACKS_UNLOCK (demux);
+
+      gst_task_join (demux->priv->output_task);
+
+      GST_API_LOCK (demux);
+      gst_adaptive_demux_reset (demux);
+      GST_API_UNLOCK (demux);
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
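+      /* Reset and (re)start the scheduler loop. The manifest update task is
+       * only restarted if a manifest was already received */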
+      GST_API_LOCK (demux);
+      gst_adaptive_demux_reset (demux);
+
+      gst_adaptive_demux_loop_start (demux->priv->scheduler_task);
+      if (g_atomic_int_get (&demux->priv->have_manifest))
+        gst_adaptive_demux_start_manifest_update_task (demux);
+      GST_API_UNLOCK (demux);
+      if (g_atomic_int_compare_and_exchange (&demux->running, FALSE, TRUE))
+        GST_DEBUG_OBJECT (demux, "demuxer has started running");
+      /* gst_task_start (demux->priv->output_task); */
+      break;
+    default:
+      break;
+  }
+
+  /* this must be run with the scheduler and output tasks stopped. */
+  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* Start download task */
+      downloadhelper_start (demux->download_helper);
+      break;
+    default:
+      break;
+  }
+
+fail_out:
+  return result;
+}
+
+static void
+gst_adaptive_demux_output_slot_free (GstAdaptiveDemux * demux,
+    OutputSlot * slot)
+{
+  GstEvent *eos = gst_event_new_eos ();
+  GST_DEBUG_OBJECT (slot->pad, "Releasing slot");
+
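+  /* Push a final EOS (carrying the demuxer segment seqnum) before
+   * deactivating and removing the pad */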
+  gst_event_set_seqnum (eos, demux->priv->segment_seqnum);
+  gst_pad_push_event (slot->pad, eos);
+  gst_pad_set_active (slot->pad, FALSE);
+  gst_flow_combiner_remove_pad (demux->priv->flowcombiner, slot->pad);
+  gst_element_remove_pad (GST_ELEMENT_CAST (demux), slot->pad);
+  if (slot->track)
+    gst_adaptive_demux_track_unref (slot->track);
+  if (slot->pending_track)
+    gst_adaptive_demux_track_unref (slot->pending_track);
+
+  g_slice_free (OutputSlot, slot);
+}
+
+static OutputSlot *
+gst_adaptive_demux_output_slot_new (GstAdaptiveDemux * demux,
+    GstStreamType streamtype)
+{
+  OutputSlot *slot;
+  GstPadTemplate *tmpl;
+  gchar *name;
+
+  switch (streamtype) {
+    case GST_STREAM_TYPE_AUDIO:
+      name = g_strdup_printf ("audio_%02u", demux->priv->n_audio_streams++);
+      tmpl =
+          gst_static_pad_template_get (&gst_adaptive_demux_audiosrc_template);
+      break;
+    case GST_STREAM_TYPE_VIDEO:
+      name = g_strdup_printf ("video_%02u", demux->priv->n_video_streams++);
+      tmpl =
+          gst_static_pad_template_get (&gst_adaptive_demux_videosrc_template);
+      break;
+    case GST_STREAM_TYPE_TEXT:
+      name =
+          g_strdup_printf ("subtitle_%02u", demux->priv->n_subtitle_streams++);
+      tmpl =
+          gst_static_pad_template_get
+          (&gst_adaptive_demux_subtitlesrc_template);
+      break;
+    default:
+      g_assert_not_reached ();
+      return NULL;
+  }
+
+  slot = g_slice_new0 (OutputSlot);
+  slot->type = streamtype;
+  slot->pushed_timed_data = FALSE;
+
+  /* Create and activate new pads */
+  slot->pad = gst_pad_new_from_template (tmpl, name);
+  g_free (name);
+  gst_object_unref (tmpl);
+
+  gst_element_add_pad (GST_ELEMENT_CAST (demux), slot->pad);
+  gst_flow_combiner_add_pad (demux->priv->flowcombiner, slot->pad);
+  gst_pad_set_active (slot->pad, TRUE);
+
+  gst_pad_set_query_function (slot->pad,
+      GST_DEBUG_FUNCPTR (gst_adaptive_demux_src_query));
+  gst_pad_set_event_function (slot->pad,
+      GST_DEBUG_FUNCPTR (gst_adaptive_demux_src_event));
+
+  gst_pad_set_element_private (slot->pad, slot);
+
+  GST_INFO_OBJECT (demux, "Created output slot %s:%s",
+      GST_DEBUG_PAD_NAME (slot->pad));
+  return slot;
+}
+
+/* Called:
+ * * After `process_manifest` or when a period starts
+ * * Or when all tracks have been created
+ *
+ * Goes over tracks and creates the collection
+ *
+ * Returns TRUE if the collection was fully created.
+ *
+ * Must be called with MANIFEST_LOCK and TRACKS_LOCK taken.
+ */
+static gboolean
+gst_adaptive_demux_update_collection (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxPeriod * period)
+{
+  GstStreamCollection *collection;
+  GList *iter;
+
+  GST_DEBUG_OBJECT (demux, "tracks_changed : %d", period->tracks_changed);
+
+  if (!period->tracks_changed) {
+    GST_DEBUG_OBJECT (demux, "Tracks didn't change");
+    return TRUE;
+  }
+
+  if (!period->tracks) {
+    GST_WARNING_OBJECT (demux, "No tracks registered/present");
+    return FALSE;
+  }
+
+  if (gst_adaptive_demux_period_has_pending_tracks (period)) {
+    GST_DEBUG_OBJECT (demux,
+        "Streams still have pending tracks, not creating/updating collection");
+    return FALSE;
+  }
+
+  /* Update collection */
+  collection = gst_stream_collection_new ("adaptivedemux");
+
+  for (iter = period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+
+    GST_DEBUG_OBJECT (demux, "Adding '%s' to collection", track->stream_id);
+    gst_stream_collection_add_stream (collection,
+        gst_object_ref (track->stream_object));
+  }
+
+  if (period->collection)
+    gst_object_unref (period->collection);
+  period->collection = collection;
+
+  return TRUE;
+}
+
+/*
+ * Called for the output period:
+ * * after `update_collection()` if the input period is the same as the output period
+ * * When the output period changes
+ *
+ * Must be called with MANIFEST_LOCK and TRACKS_LOCK taken.
+ */
+static gboolean
+gst_adaptive_demux_post_collection (GstAdaptiveDemux * demux)
+{
+  GstStreamCollection *collection;
+  GstAdaptiveDemuxPeriod *period = demux->output_period;
+  guint32 seqnum = g_atomic_int_get (&demux->priv->requested_selection_seqnum);
+
+  g_return_val_if_fail (period, FALSE);
+  if (!period->collection) {
+    GST_DEBUG_OBJECT (demux, "No collection available yet");
+    return TRUE;
+  }
+
+  collection = period->collection;
+
+  GST_DEBUG_OBJECT (demux, "Posting collection for period %d",
+      period->period_num);
+
+  /* Post collection */
+  TRACKS_UNLOCK (demux);
+  GST_MANIFEST_UNLOCK (demux);
+
+  gst_element_post_message (GST_ELEMENT_CAST (demux),
+      gst_message_new_stream_collection (GST_OBJECT (demux), collection));
+
+  GST_MANIFEST_LOCK (demux);
+  TRACKS_LOCK (demux);
+
+  /* If no stream selection was handled, make a default selection */
+  if (seqnum == g_atomic_int_get (&demux->priv->requested_selection_seqnum)) {
+    gst_adaptive_demux_period_select_default_tracks (demux,
+        demux->output_period);
+  }
+
+  /* Make sure the output task is running */
+  if (gst_adaptive_demux2_is_running (demux)) {
+    demux->priv->flushing = FALSE;
+    GST_DEBUG_OBJECT (demux, "Starting the output task");
+    gst_task_start (demux->priv->output_task);
+  }
+
+  return TRUE;
+}
+
+static gboolean
+handle_incoming_manifest (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *demux_class;
+  GstQuery *query;
+  gboolean query_res;
+  gboolean ret = TRUE;
+  gsize available;
+  GstBuffer *manifest_buffer;
+
+  GST_API_LOCK (demux);
+  GST_MANIFEST_LOCK (demux);
+
+  demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  available = gst_adapter_available (demux->priv->input_adapter);
+
+  if (available == 0)
+    goto eos_without_data;
+
+  GST_DEBUG_OBJECT (demux, "Got EOS on the sink pad: manifest fetched");
+
+  /* Need to get the URI to use it as a base for generating the fragment
+   * URIs */
+  query = gst_query_new_uri ();
+  query_res = gst_pad_peer_query (demux->sinkpad, query);
+  if (query_res) {
+    gchar *uri, *redirect_uri;
+    gboolean permanent;
+
+    gst_query_parse_uri (query, &uri);
+    gst_query_parse_uri_redirection (query, &redirect_uri);
+    gst_query_parse_uri_redirection_permanent (query, &permanent);
+
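+    /* On a permanent redirect the redirect target becomes the manifest URI,
+     * otherwise it is only used as the base URI */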
+    if (permanent && redirect_uri) {
+      demux->manifest_uri = redirect_uri;
+      demux->manifest_base_uri = NULL;
+      g_free (uri);
+    } else {
+      demux->manifest_uri = uri;
+      demux->manifest_base_uri = redirect_uri;
+    }
+
+    GST_DEBUG_OBJECT (demux, "Fetched manifest at URI: %s (base: %s)",
+        demux->manifest_uri, GST_STR_NULL (demux->manifest_base_uri));
+  } else {
+    GST_WARNING_OBJECT (demux, "Upstream URI query failed.");
+  }
+  gst_query_unref (query);
+
+  /* If somehow we didn't receive a stream-start with a group_id, pick one now */
+  if (!demux->have_group_id) {
+    demux->have_group_id = TRUE;
+    demux->group_id = gst_util_group_id_next ();
+  }
+
+  /* Let the subclass parse the manifest */
+  manifest_buffer =
+      gst_adapter_take_buffer (demux->priv->input_adapter, available);
+  ret = demux_class->process_manifest (demux, manifest_buffer);
+  gst_buffer_unref (manifest_buffer);
+
+  gst_element_post_message (GST_ELEMENT_CAST (demux),
+      gst_message_new_element (GST_OBJECT_CAST (demux),
+          gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+              "manifest-uri", G_TYPE_STRING,
+              demux->manifest_uri, "uri", G_TYPE_STRING,
+              demux->manifest_uri,
+              "manifest-download-start", GST_TYPE_CLOCK_TIME,
+              GST_CLOCK_TIME_NONE,
+              "manifest-download-stop", GST_TYPE_CLOCK_TIME,
+              gst_util_get_timestamp (), NULL)));
+
+  if (!ret)
+    goto invalid_manifest;
+
+  /* Streams should have been added to the input period if the manifest parsing
+   * succeeded */
+  if (!demux->input_period->streams)
+    goto no_streams;
+
+  g_atomic_int_set (&demux->priv->have_manifest, TRUE);
+
+  GST_DEBUG_OBJECT (demux, "Manifest was processed, setting ourselves up");
+  /* Send duration message */
+  if (!gst_adaptive_demux_is_live (demux)) {
+    GstClockTime duration = demux_class->get_duration (demux);
+
+    demux->priv->duration = duration;
+    if (duration != GST_CLOCK_TIME_NONE) {
+      GST_DEBUG_OBJECT (demux,
+          "Sending duration message : %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (duration));
+      gst_element_post_message (GST_ELEMENT (demux),
+          gst_message_new_duration_changed (GST_OBJECT (demux)));
+    } else {
+      GST_DEBUG_OBJECT (demux,
+          "media duration unknown, can not send the duration message");
+    }
+  }
+
+  TRACKS_LOCK (demux);
+  /* New streams/tracks will have been added to the input period */
+  /* The input period has streams, make it the active output period */
+  /* FIXME : Factorize this into a function to make a period active */
+  demux->output_period = gst_adaptive_demux_period_ref (demux->input_period);
+  ret = gst_adaptive_demux_update_collection (demux, demux->output_period) &&
+      gst_adaptive_demux_post_collection (demux);
+  TRACKS_UNLOCK (demux);
+
+  gst_adaptive_demux_prepare_streams (demux,
+      gst_adaptive_demux_is_live (demux));
+  gst_adaptive_demux_start_tasks (demux);
+  gst_adaptive_demux_start_manifest_update_task (demux);
+
+unlock_out:
+  GST_MANIFEST_UNLOCK (demux);
+  GST_API_UNLOCK (demux);
+
+  return ret;
+
+  /* ERRORS */
+eos_without_data:
+  {
+    GST_WARNING_OBJECT (demux, "Received EOS without a manifest.");
+    ret = FALSE;
+    goto unlock_out;
+  }
+
+no_streams:
+  {
+    /* no streams */
+    GST_WARNING_OBJECT (demux, "No streams created from manifest");
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+        (_("This file contains no playable streams.")),
+        ("No known stream formats found at the Manifest"));
+    ret = FALSE;
+    goto unlock_out;
+  }
+
+invalid_manifest:
+  {
+    GST_MANIFEST_UNLOCK (demux);
+    GST_API_UNLOCK (demux);
+
+    /* In most cases this will happen if a wrong URL was set on the
+     * source element and we received a 404 HTML error page instead of
+     * the manifest */
+    GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid manifest."), (NULL));
+    return FALSE;
+  }
+}
+
+struct http_headers_collector
+{
+  GstAdaptiveDemux *demux;
+  gchar **cookies;
+};
+
+static gboolean
+gst_adaptive_demux_handle_upstream_http_header (GQuark field_id,
+    const GValue * value, gpointer userdata)
+{
+  struct http_headers_collector *hdr_data = userdata;
+  GstAdaptiveDemux *demux = hdr_data->demux;
+  const gchar *field_name = g_quark_to_string (field_id);
+
+  if (G_UNLIKELY (value == NULL))
+    return TRUE;                /* This should not happen */
+
+  if (g_ascii_strcasecmp (field_name, "User-Agent") == 0) {
+    const gchar *user_agent = g_value_get_string (value);
+
+    GST_INFO_OBJECT (demux, "User-Agent : %s", GST_STR_NULL (user_agent));
+    downloadhelper_set_user_agent (demux->download_helper, user_agent);
+  }
+
+  if ((g_ascii_strcasecmp (field_name, "Cookie") == 0) ||
+      g_ascii_strcasecmp (field_name, "Set-Cookie") == 0) {
+    guint i = 0, prev_len = 0, total_len = 0;
+    gchar **cookies = NULL;
+
+    if (hdr_data->cookies != NULL)
+      prev_len = g_strv_length (hdr_data->cookies);
+
+    if (GST_VALUE_HOLDS_ARRAY (value)) {
+      total_len = gst_value_array_get_size (value) + prev_len;
+      cookies = (gchar **) g_malloc0 ((total_len + 1) * sizeof (gchar *));
+
+      for (i = 0; i < gst_value_array_get_size (value); i++) {
+        GST_INFO_OBJECT (demux, "%s : %s", g_quark_to_string (field_id),
+            g_value_get_string (gst_value_array_get_value (value, i)));
+        cookies[i] = g_value_dup_string (gst_value_array_get_value (value, i));
+      }
+    } else if (G_VALUE_HOLDS_STRING (value)) {
+      total_len = 1 + prev_len;
+      cookies = (gchar **) g_malloc0 ((total_len + 1) * sizeof (gchar *));
+
+      GST_INFO_OBJECT (demux, "%s : %s", g_quark_to_string (field_id),
+          g_value_get_string (value));
+      cookies[0] = g_value_dup_string (value);
+      /* One new cookie was stored, make sure the existing cookies are
+       * appended after it (and not on top of it) below */
+      i = 1;
+    } else {
+      GST_WARNING_OBJECT (demux, "%s field is not string or array",
+          g_quark_to_string (field_id));
+    }
+
+    if (cookies) {
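+      /* Append the previously stored cookies after the newly received ones */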
+      if (prev_len) {
+        guint j;
+        for (j = 0; j < prev_len; j++) {
+          GST_DEBUG_OBJECT (demux,
+              "Append existing cookie %s", hdr_data->cookies[j]);
+          cookies[i + j] = g_strdup (hdr_data->cookies[j]);
+        }
+      }
+      cookies[total_len] = NULL;
+
+      g_strfreev (hdr_data->cookies);
+      hdr_data->cookies = cookies;
+    }
+  }
+
+  if (g_ascii_strcasecmp (field_name, "Referer") == 0) {
+    const gchar *referer = g_value_get_string (value);
+    GST_INFO_OBJECT (demux, "Referer : %s", GST_STR_NULL (referer));
+
+    downloadhelper_set_referer (demux->download_helper, referer);
+  }
+
+  /* Date header can be used to estimate server offset */
+  if (g_ascii_strcasecmp (field_name, "Date") == 0) {
+    const gchar *http_date = g_value_get_string (value);
+
+    if (http_date) {
+      GstDateTime *datetime =
+          gst_adaptive_demux_util_parse_http_head_date (http_date);
+
+      if (datetime) {
+        GDateTime *utc_now = gst_date_time_to_g_date_time (datetime);
+        gchar *date_string = gst_date_time_to_iso8601_string (datetime);
+
+        GST_INFO_OBJECT (demux,
+            "HTTP response Date %s", GST_STR_NULL (date_string));
+        g_free (date_string);
+
+        gst_adaptive_demux_clock_set_utc_time (demux->realtime_clock, utc_now);
+      }
+    }
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_adaptive_demux_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+  gboolean ret;
+
+  switch (event->type) {
+    case GST_EVENT_FLUSH_STOP:{
+      GST_API_LOCK (demux);
+      GST_MANIFEST_LOCK (demux);
+
+      gst_adaptive_demux_reset (demux);
+
+      ret = gst_pad_event_default (pad, parent, event);
+
+      GST_MANIFEST_UNLOCK (demux);
+      GST_API_UNLOCK (demux);
+
+      return ret;
+    }
+    case GST_EVENT_EOS:
+    {
+      if (GST_ADAPTIVE_SCHEDULER_LOCK (demux)) {
+        if (!handle_incoming_manifest (demux)) {
+          GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+          return gst_pad_event_default (pad, parent, event);
+        }
+        GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+      } else {
+        GST_ERROR_OBJECT (demux,
+            "Failed to acquire scheduler to handle manifest");
+        return gst_pad_event_default (pad, parent, event);
+      }
+      gst_event_unref (event);
+      return TRUE;
+    }
+    case GST_EVENT_STREAM_START:
+      if (gst_event_parse_group_id (event, &demux->group_id))
+        demux->have_group_id = TRUE;
+      else
+        demux->have_group_id = FALSE;
+      /* Swallow stream-start, we'll push our own */
+      gst_event_unref (event);
+      return TRUE;
+    case GST_EVENT_SEGMENT:
+      /* Swallow newsegments, we'll push our own */
+      gst_event_unref (event);
+      return TRUE;
+    case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:{
+      const GstStructure *structure = gst_event_get_structure (event);
+      struct http_headers_collector c = { demux, NULL };
+
+      if (gst_structure_has_name (structure, "http-headers")) {
+        if (gst_structure_has_field (structure, "request-headers")) {
+          GstStructure *req_headers = NULL;
+          gst_structure_get (structure, "request-headers", GST_TYPE_STRUCTURE,
+              &req_headers, NULL);
+          if (req_headers) {
+            gst_structure_foreach (req_headers,
+                gst_adaptive_demux_handle_upstream_http_header, &c);
+            gst_structure_free (req_headers);
+          }
+        }
+        if (gst_structure_has_field (structure, "response-headers")) {
+          GstStructure *res_headers = NULL;
+          gst_structure_get (structure, "response-headers", GST_TYPE_STRUCTURE,
+              &res_headers, NULL);
+          if (res_headers) {
+            gst_structure_foreach (res_headers,
+                gst_adaptive_demux_handle_upstream_http_header, &c);
+            gst_structure_free (res_headers);
+          }
+        }
+
+        if (c.cookies)
+          downloadhelper_set_cookies (demux->download_helper, c.cookies);
+      }
+      break;
+    }
+    default:
+      break;
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+static GstFlowReturn
+gst_adaptive_demux_sink_chain (GstPad * pad, GstObject * parent,
+    GstBuffer * buffer)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+
+  GST_MANIFEST_LOCK (demux);
+
+  gst_adapter_push (demux->priv->input_adapter, buffer);
+
+  GST_INFO_OBJECT (demux, "Received manifest buffer, total size is %i bytes",
+      (gint) gst_adapter_available (demux->priv->input_adapter));
+
+  GST_MANIFEST_UNLOCK (demux);
+  return GST_FLOW_OK;
+}
+
+
+/* Called with TRACKS_LOCK taken */
+static void
+gst_adaptive_demux_period_reset_tracks (GstAdaptiveDemuxPeriod * period)
+{
+  GList *tmp;
+
+  for (tmp = period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+
+    gst_adaptive_demux_track_flush (track);
+    if (gst_pad_is_active (track->sinkpad)) {
+      gst_pad_set_active (track->sinkpad, FALSE);
+      gst_pad_set_active (track->sinkpad, TRUE);
+    }
+  }
+}
+
+/* Resets all tracks to their initial state, ready to receive new data. */
+static void
+gst_adaptive_demux_reset_tracks (GstAdaptiveDemux * demux)
+{
+  TRACKS_LOCK (demux);
+  g_queue_foreach (demux->priv->periods,
+      (GFunc) gst_adaptive_demux_period_reset_tracks, NULL);
+  TRACKS_UNLOCK (demux);
+}
+
+/* Subclasses will call this function to ensure that a new input period is
+ * available to receive new streams and tracks */
+gboolean
+gst_adaptive_demux_start_new_period (GstAdaptiveDemux * demux)
+{
+  if (demux->input_period && !demux->input_period->prepared) {
+    GST_DEBUG_OBJECT (demux, "Using existing input period");
+    return TRUE;
+  }
+
+  if (demux->input_period) {
+    GST_DEBUG_OBJECT (demux, "Closing previous period");
+    demux->input_period->closed = TRUE;
+  }
+  GST_DEBUG_OBJECT (demux, "Setting up new period");
+
+  demux->input_period = gst_adaptive_demux_period_new (demux);
+
+  return TRUE;
+}
+
+/* must be called with manifest_lock taken */
+static void
+gst_adaptive_demux_reset (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GList *iter;
+
+  gst_adaptive_demux_stop_tasks (demux, TRUE);
+
+  if (klass->reset)
+    klass->reset (demux);
+
+  /* Disable and remove all outputs */
+  GST_DEBUG_OBJECT (demux, "Disabling and removing all outputs");
+  for (iter = demux->priv->outputs; iter; iter = iter->next) {
+    gst_adaptive_demux_output_slot_free (demux, (OutputSlot *) iter->data);
+  }
+  g_list_free (demux->priv->outputs);
+  demux->priv->outputs = NULL;
+
+  g_queue_clear_full (demux->priv->periods,
+      (GDestroyNotify) gst_adaptive_demux_period_unref);
+
+  /* The output period always has an extra ref taken on it */
+  if (demux->output_period)
+    gst_adaptive_demux_period_unref (demux->output_period);
+  demux->output_period = NULL;
+  /* The input period doesn't have an extra ref taken on it */
+  demux->input_period = NULL;
+
+  gst_adaptive_demux_start_new_period (demux);
+
+  g_free (demux->manifest_uri);
+  g_free (demux->manifest_base_uri);
+  demux->manifest_uri = NULL;
+  demux->manifest_base_uri = NULL;
+
+  gst_adapter_clear (demux->priv->input_adapter);
+  g_atomic_int_set (&demux->priv->have_manifest, FALSE);
+
+  gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+  demux->instant_rate_multiplier = 1.0;
+
+  demux->priv->duration = GST_CLOCK_TIME_NONE;
+
+  demux->priv->percent = -1;
+  demux->priv->is_buffering = TRUE;
+
+  demux->have_group_id = FALSE;
+  demux->group_id = G_MAXUINT;
+  demux->priv->segment_seqnum = gst_util_seqnum_next ();
+
+  demux->priv->global_output_position = 0;
+
+  demux->priv->n_audio_streams = 0;
+  demux->priv->n_video_streams = 0;
+  demux->priv->n_subtitle_streams = 0;
+
+  gst_flow_combiner_reset (demux->priv->flowcombiner);
+}
+
+static gboolean
+gst_adaptive_demux_query (GstElement * element, GstQuery * query)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (element);
+
+  GST_LOG_OBJECT (demux, "%" GST_PTR_FORMAT, query);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_BUFFERING:
+    {
+      GstFormat format;
+      gst_query_parse_buffering_range (query, &format, NULL, NULL, NULL);
+
+      if (!demux->output_period) {
+        if (format != GST_FORMAT_TIME) {
+          GST_DEBUG_OBJECT (demux,
+              "No period setup yet, can't answer non-TIME buffering queries");
+          return FALSE;
+        }
+
+        GST_DEBUG_OBJECT (demux,
+            "No period setup yet, but still answering buffering query");
+        return TRUE;
+      }
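+      /* A period is set up: fall through and let the parent class handle the
+       * query below */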
+    }
+    default:
+      break;
+  }
+
+  return GST_ELEMENT_CLASS (parent_class)->query (element, query);
+}
+
+/* MANIFEST_LOCK held. Find the stream that owns the given element */
+static GstAdaptiveDemux2Stream *
+find_stream_for_element_locked (GstAdaptiveDemux * demux, GstObject * o)
+{
+  GList *iter;
+
+  /* We only look in the streams of the input period (i.e. the one with
+   * active streams) */
+  for (iter = demux->input_period->streams; iter; iter = iter->next) {
+    GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) iter->data;
+    if (gst_object_has_as_ancestor (o, GST_OBJECT_CAST (stream->parsebin))) {
+      return stream;
+    }
+  }
+
+  return NULL;
+}
+
+/* TRACKS_LOCK held */
+static GstAdaptiveDemuxTrack *
+gst_adaptive_demux2_stream_find_track_of_type (GstAdaptiveDemux2Stream * stream,
+    GstStreamType stream_type)
+{
+  GList *iter;
+
+  for (iter = stream->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = iter->data;
+
+    if (track->type == stream_type)
+      return track;
+  }
+
+  return NULL;
+}
+
+/* MANIFEST and TRACKS lock held */
+static void
+gst_adaptive_demux2_stream_update_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  guint i;
+
+  GST_DEBUG_OBJECT (stream, "Updating track information from collection");
+
+  for (i = 0; i < gst_stream_collection_get_size (stream->stream_collection);
+      i++) {
+    GstStream *gst_stream =
+        gst_stream_collection_get_stream (stream->stream_collection, i);
+    GstStreamType stream_type = gst_stream_get_stream_type (gst_stream);
+    GstAdaptiveDemuxTrack *track;
+
+    if (stream_type == GST_STREAM_TYPE_UNKNOWN)
+      continue;
+    track = gst_adaptive_demux2_stream_find_track_of_type (stream, stream_type);
+    if (!track) {
+      GST_DEBUG_OBJECT (stream,
+          "We don't have an existing track to handle stream %" GST_PTR_FORMAT,
+          gst_stream);
+      continue;
+    }
+
+    g_free (track->upstream_stream_id);
+    track->upstream_stream_id =
+        g_strdup (gst_stream_get_stream_id (gst_stream));
+  }
+}
+
+static gboolean
+tags_have_language_info (GstTagList * tags)
+{
+  const gchar *language = NULL;
+
+  if (tags == NULL)
+    return FALSE;
+
+  if (gst_tag_list_peek_string_index (tags, GST_TAG_LANGUAGE_CODE, 0,
+          &language))
+    return TRUE;
+  if (gst_tag_list_peek_string_index (tags, GST_TAG_LANGUAGE_NAME, 0,
+          &language))
+    return TRUE;
+
+  return FALSE;
+}
+
+static gboolean
+can_handle_collection (GstAdaptiveDemux2Stream * stream,
+    GstStreamCollection * collection)
+{
+  guint i;
+  guint nb_audio, nb_video, nb_text;
+  gboolean have_audio_languages = TRUE;
+  gboolean have_text_languages = TRUE;
+
+  nb_audio = nb_video = nb_text = 0;
+
+  for (i = 0; i < gst_stream_collection_get_size (collection); i++) {
+    GstStream *gst_stream = gst_stream_collection_get_stream (collection, i);
+    GstTagList *tags = gst_stream_get_tags (gst_stream);
+
+    GST_DEBUG_OBJECT (stream,
+        "Internal collection stream #%d %" GST_PTR_FORMAT, i, gst_stream);
+    switch (gst_stream_get_stream_type (gst_stream)) {
+      case GST_STREAM_TYPE_AUDIO:
+        have_audio_languages &= tags_have_language_info (tags);
+        nb_audio++;
+        break;
+      case GST_STREAM_TYPE_VIDEO:
+        nb_video++;
+        break;
+      case GST_STREAM_TYPE_TEXT:
+        have_text_languages &= tags_have_language_info (tags);
+        nb_text++;
+        break;
+      default:
+        break;
+    }
+
+    /* gst_stream_get_tags() returned a new reference */
+    if (tags)
+      gst_tag_list_unref (tags);
+  }
+
+  /* Check that we either have at most 1 of each track type, or that
+   * we have language tags for each to tell which is which */
+  if (nb_video > 1 ||
+      (nb_audio > 1 && !have_audio_languages) ||
+      (nb_text > 1 && !have_text_languages)) {
+    GST_WARNING
+        ("Collection can't be handled (nb_audio:%d, nb_video:%d, nb_text:%d)",
+        nb_audio, nb_video, nb_text);
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+static void
+gst_adaptive_demux_handle_stream_collection_msg (GstAdaptiveDemux * demux,
+    GstMessage * msg)
+{
+  GstAdaptiveDemux2Stream *stream;
+  GstStreamCollection *collection = NULL;
+  gboolean pending_tracks_activated = FALSE;
+
+  GST_MANIFEST_LOCK (demux);
+
+  stream = find_stream_for_element_locked (demux, GST_MESSAGE_SRC (msg));
+  if (stream == NULL) {
+    GST_WARNING_OBJECT (demux,
+        "Failed to locate stream for collection message");
+    goto beach;
+  }
+
+  gst_message_parse_stream_collection (msg, &collection);
+  if (!collection)
+    goto beach;
+
+  /* Check whether the collection is "sane" or not.
+   *
+   * In the context of adaptive streaming, we can only handle multiplexed
+   * content that provides at most one stream of each valid type (audio,
+   * video, text). Without this we cannot reliably match the output of the
+   * multiplex to the various tracks.
+   *
+   * FIXME : In the future and *IF* we encounter such streams, we could envision
+   * supporting multiple streams of the same type if, and only if, they have
+   * tags that allow differentiating them (ex: languages).
+   */
+  if (!can_handle_collection (stream, collection)) {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+        (_("Stream format can't be handled")),
+        ("The streams provided by the multiplex are ambiguous"));
+    goto beach;
+  }
+
+  /* Store the collection on the stream */
+  gst_object_replace ((GstObject **) & stream->stream_collection,
+      (GstObject *) collection);
+
+  /* IF there are pending tracks, ask the subclass to handle that */
+  if (stream->pending_tracks) {
+    GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+    g_assert (demux_class->stream_update_tracks);
+    demux_class->stream_update_tracks (demux, stream);
+    TRACKS_LOCK (demux);
+    stream->pending_tracks = FALSE;
+    pending_tracks_activated = TRUE;
+    if (gst_adaptive_demux_update_collection (demux, demux->input_period) &&
+        demux->input_period == demux->output_period)
+      gst_adaptive_demux_post_collection (demux);
+  } else {
+    g_assert (stream->tracks);
+    TRACKS_LOCK (demux);
+    /* If we already have assigned tracks, update the pending upstream stream_id
+     * for each of them based on the collection information. */
+    gst_adaptive_demux2_stream_update_tracks (demux, stream);
+  }
+
+  /* If we discovered pending tracks and we no longer have any, we can ensure
+   * selected tracks are started */
+  if (pending_tracks_activated
+      && !gst_adaptive_demux_period_has_pending_tracks (demux->input_period)) {
+    GList *iter = demux->input_period->streams;
+    for (; iter; iter = iter->next) {
+      GstAdaptiveDemux2Stream *new_stream = iter->data;
+
+      /* The stream that posted this collection was already started. If a
+       * different stream is now selected, start it */
+      if (stream != new_stream
+          && gst_adaptive_demux2_stream_is_selected_locked (new_stream))
+        gst_adaptive_demux2_stream_start (new_stream);
+    }
+  }
+  TRACKS_UNLOCK (demux);
+
+beach:
+  GST_MANIFEST_UNLOCK (demux);
+
+  gst_message_unref (msg);
+  msg = NULL;
+}
+
+static void
+gst_adaptive_demux_handle_message (GstBin * bin, GstMessage * msg)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (bin);
+
+  switch (GST_MESSAGE_TYPE (msg)) {
+    case GST_MESSAGE_STREAM_COLLECTION:
+    {
+      gst_adaptive_demux_handle_stream_collection_msg (demux, msg);
+      return;
+    }
+    case GST_MESSAGE_ERROR:{
+      GstAdaptiveDemux2Stream *stream = NULL;
+      GError *err = NULL;
+      gchar *debug = NULL;
+      gchar *new_error = NULL;
+      const GstStructure *details = NULL;
+
+      GST_MANIFEST_LOCK (demux);
+
+      stream = find_stream_for_element_locked (demux, GST_MESSAGE_SRC (msg));
+      if (stream == NULL) {
+        GST_WARNING_OBJECT (demux,
+            "Failed to locate stream for errored element");
+        GST_MANIFEST_UNLOCK (demux);
+        break;
+      }
+
+      gst_message_parse_error (msg, &err, &debug);
+
+      GST_WARNING_OBJECT (demux,
+          "Source posted error: %d:%d %s (%s)", err->domain, err->code,
+          err->message, debug);
+
+      if (debug)
+        new_error = g_strdup_printf ("%s: %s\n", err->message, debug);
+      if (new_error) {
+        g_free (err->message);
+        err->message = new_error;
+      }
+
+      gst_message_parse_error_details (msg, &details);
+      if (details) {
+        gst_structure_get_uint (details, "http-status-code",
+            &stream->last_status_code);
+      }
+
+      /* error, but ask to retry */
+      if (GST_ADAPTIVE_SCHEDULER_LOCK (demux)) {
+        gst_adaptive_demux2_stream_parse_error (stream, err);
+        GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+      }
+
+      g_error_free (err);
+      g_free (debug);
+
+      GST_MANIFEST_UNLOCK (demux);
+
+      gst_message_unref (msg);
+      msg = NULL;
+    }
+      break;
+    default:
+      break;
+  }
+
+  if (msg)
+    GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
+}
+
+/* must be called with manifest_lock taken */
+GstClockTime
+gst_adaptive_demux2_stream_get_presentation_offset (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemuxClass *klass;
+
+  klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->get_presentation_offset == NULL)
+    return 0;
+
+  return klass->get_presentation_offset (demux, stream);
+}
+
+/* must be called with manifest_lock taken */
+GstClockTime
+gst_adaptive_demux_get_period_start_time (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass;
+
+  klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->get_period_start_time == NULL)
+    return 0;
+
+  return klass->get_period_start_time (demux);
+}
+
+/* must be called with manifest_lock taken */
+static gboolean
+gst_adaptive_demux_prepare_streams (GstAdaptiveDemux * demux,
+    gboolean first_and_live)
+{
+  GList *iter;
+  GstClockTime period_start;
+  GstClockTimeDiff min_stream_time = GST_CLOCK_STIME_NONE;
+  GList *new_streams;
+
+  g_return_val_if_fail (demux->input_period->streams, FALSE);
+  g_assert (demux->input_period->prepared == FALSE);
+
+  new_streams = demux->input_period->streams;
+
+  if (!gst_adaptive_demux2_is_running (demux)) {
+    GST_DEBUG_OBJECT (demux, "Not exposing pads due to shutdown");
+    return TRUE;
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "Preparing %d streams for period %d , first_and_live:%d",
+      g_list_length (new_streams), demux->input_period->period_num,
+      first_and_live);
+
+  for (iter = new_streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+
+    GST_DEBUG_OBJECT (stream, "Preparing stream");
+
+    stream->need_header = TRUE;
+    stream->discont = TRUE;
+
+    /* Grab the first stream time for live streams
+     * * If the stream is selected
+     * * Or it provides dynamic tracks (in which case we need to force an update)
+     */
+    if (first_and_live
+        && (gst_adaptive_demux2_stream_is_selected_locked (stream)
+            || stream->pending_tracks)) {
+      /* TODO we only need the first timestamp, maybe create a simple function to
+       * get the current PTS of a fragment ? */
+      GST_DEBUG_OBJECT (stream, "Calling update_fragment_info");
+      gst_adaptive_demux2_stream_update_fragment_info (demux, stream);
+
+      GST_DEBUG_OBJECT (stream,
+          "Got stream time %" GST_STIME_FORMAT,
+          GST_STIME_ARGS (stream->fragment.stream_time));
+
+      if (GST_CLOCK_STIME_IS_VALID (min_stream_time)) {
+        min_stream_time = MIN (min_stream_time, stream->fragment.stream_time);
+      } else {
+        min_stream_time = stream->fragment.stream_time;
+      }
+    }
+  }
+
+  period_start = gst_adaptive_demux_get_period_start_time (demux);
+
+  /* For live streams, the subclass is supposed to seek to the current fragment
+   * and then tell us its stream time in stream->fragment.stream_time.  We now
+   * also have to seek our demuxer segment to reflect this.
+   *
+   * FIXME: This needs some refactoring at some point.
+   */
+  if (first_and_live) {
+    gst_segment_do_seek (&demux->segment, demux->segment.rate, GST_FORMAT_TIME,
+        GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, min_stream_time + period_start,
+        GST_SEEK_TYPE_NONE, -1, NULL);
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "period_start:%" GST_TIME_FORMAT ", min_stream_time:%" GST_STIME_FORMAT
+      " demux segment %" GST_SEGMENT_FORMAT,
+      GST_TIME_ARGS (period_start), GST_STIME_ARGS (min_stream_time),
+      &demux->segment);
+
+  for (iter = new_streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+    stream->compute_segment = TRUE;
+    stream->first_and_live = first_and_live;
+  }
+  demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
+  demux->input_period->prepared = TRUE;
+
+  return TRUE;
+}
+
+static GstAdaptiveDemuxTrack *
+find_track_for_stream_id (GstAdaptiveDemuxPeriod * period, gchar * stream_id)
+{
+  GList *tmp;
+
+  for (tmp = period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+    if (!g_strcmp0 (track->stream_id, stream_id))
+      return track;
+  }
+
+  return NULL;
+}
+
+/* TRACKS_LOCK held */
+void
+demux_update_buffering_locked (GstAdaptiveDemux * demux)
+{
+  GstClockTime min_level_time = GST_CLOCK_TIME_NONE;
+  GstClockTime video_level_time = GST_CLOCK_TIME_NONE;
+  GstClockTime audio_level_time = GST_CLOCK_TIME_NONE;
+  GList *tmp;
+  gint min_percent = -1, percent;
+  gboolean all_eos = TRUE;
+
+  /* Go over all active tracks of the output period and update level */
+
+  /* Check that all tracks are above their respective low thresholds (different
+   * tracks may have different fragment durations yielding different buffering
+   * percentages) Overall buffering percent is the lowest. */
+  for (tmp = demux->output_period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+
+    GST_LOG_OBJECT (demux,
+        "Checking track '%s' active:%d selected:%d eos:%d level:%"
+        GST_TIME_FORMAT " buffering_threshold:%" GST_TIME_FORMAT,
+        track->stream_id, track->active, track->selected, track->eos,
+        GST_TIME_ARGS (track->level_time),
+        GST_TIME_ARGS (track->buffering_threshold));
+
+    if (track->active && track->selected) {
+      if (!track->eos) {
+        gint cur_percent;
+
+        all_eos = FALSE;
+        if (min_level_time == GST_CLOCK_TIME_NONE) {
+          min_level_time = track->level_time;
+        } else if (track->level_time < min_level_time) {
+          min_level_time = track->level_time;
+        }
+
+        if (track->type & GST_STREAM_TYPE_VIDEO
+            && video_level_time > track->level_time)
+          video_level_time = track->level_time;
+
+        if (track->type & GST_STREAM_TYPE_AUDIO
+            && audio_level_time > track->level_time)
+          audio_level_time = track->level_time;
+
+        if (track->level_time != GST_CLOCK_TIME_NONE
+            && track->buffering_threshold != 0) {
+          cur_percent =
+              gst_util_uint64_scale (track->level_time, 100,
+              track->buffering_threshold);
+          if (min_percent < 0 || cur_percent < min_percent)
+            min_percent = cur_percent;
+        }
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "Minimum time level %" GST_TIME_FORMAT " percent %d all_eos:%d",
+      GST_TIME_ARGS (min_level_time), min_percent, all_eos);
+
+  /* Update demuxer video/audio level properties */
+  GST_OBJECT_LOCK (demux);
+  demux->current_level_time_video = video_level_time;
+  demux->current_level_time_audio = audio_level_time;
+  GST_OBJECT_UNLOCK (demux);
+
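+  /* Nothing to report if no track produced a buffering percentage and not
+   * all of them are EOS */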
+  if (min_percent < 0 && !all_eos)
+    return;
+
+  if (min_percent > 100 || all_eos)
+    percent = 100;
+  else
+    percent = MAX (0, min_percent);
+
+  GST_LOG_OBJECT (demux, "percent : %d %%", percent);
+
+  if (demux->priv->is_buffering) {
+    if (percent >= 100)
+      demux->priv->is_buffering = FALSE;
+    if (demux->priv->percent != percent) {
+      demux->priv->percent = percent;
+      demux->priv->percent_changed = TRUE;
+    }
+  } else if (percent < 1) {
+    demux->priv->is_buffering = TRUE;
+    if (demux->priv->percent != percent) {
+      demux->priv->percent = percent;
+      demux->priv->percent_changed = TRUE;
+    }
+  }
+
+  if (demux->priv->percent_changed)
+    GST_DEBUG_OBJECT (demux, "Percent changed, %d %% is_buffering:%d", percent,
+        demux->priv->is_buffering);
+}
+
+/* With TRACKS_LOCK held */
+void
+demux_post_buffering_locked (GstAdaptiveDemux * demux)
+{
+  gint percent;
+  GstMessage *msg;
+
+  if (!demux->priv->percent_changed)
+    return;
+
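+  /* Take BUFFERING_LOCK and read the percentage, then drop TRACKS_LOCK while
+   * posting the buffering message (posting can call synchronous bus handlers).
+   * Afterwards, only clear percent_changed if the percentage is still the one
+   * we posted. */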
+  BUFFERING_LOCK (demux);
+  percent = demux->priv->percent;
+  msg = gst_message_new_buffering ((GstObject *) demux, percent);
+  TRACKS_UNLOCK (demux);
+  gst_element_post_message ((GstElement *) demux, msg);
+
+  BUFFERING_UNLOCK (demux);
+  TRACKS_LOCK (demux);
+  if (percent == demux->priv->percent)
+    demux->priv->percent_changed = FALSE;
+}
+
+/* MANIFEST_LOCK and TRACKS_LOCK held */
+GstAdaptiveDemux2Stream *
+find_stream_for_track_locked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxTrack * track)
+{
+  GList *iter;
+
+  for (iter = demux->output_period->streams; iter; iter = iter->next) {
+    GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) iter->data;
+    if (g_list_find (stream->tracks, track))
+      return stream;
+  }
+
+  return NULL;
+}
+
+/* Scheduler context held, takes TRACKS_LOCK */
+static GstAdaptiveDemux2Stream *
+gst_adaptive_demux_find_stream_for_pad (GstAdaptiveDemux * demux, GstPad * pad)
+{
+  GList *iter;
+  GstAdaptiveDemuxTrack *track = NULL;
+  GstAdaptiveDemux2Stream *stream = NULL;
+
+  TRACKS_LOCK (demux);
+  for (iter = demux->output_period->tracks; iter; iter = g_list_next (iter)) {
+    OutputSlot *cand = iter->data;
+    if (cand->pad == pad) {
+      track = cand->track;
+      break;
+    }
+  }
+
+  if (track)
+    stream = find_stream_for_track_locked (demux, track);
+
+  TRACKS_UNLOCK (demux);
+
+  return stream;
+}
+
+/* Called from seek handler
+ *
+ * This function is used when a (flushing) seek caused a new period to be activated.
+ *
+ * This will ensure that:
+ * * the current output period is marked as finished (EOS)
+ * * any intermediate (non-input/non-output) periods are removed
+ * * the new input period is prepared and ready
+ */
+static void
+gst_adaptive_demux_seek_to_input_period (GstAdaptiveDemux * demux)
+{
+  GList *iter;
+
+  GST_DEBUG_OBJECT (demux,
+      "Preparing new input period %u", demux->input_period->period_num);
+
+  /* Prepare the new input period */
+  gst_adaptive_demux_update_collection (demux, demux->input_period);
+
+  /* Transfer the previous selection to the new input period */
+  gst_adaptive_demux_period_transfer_selection (demux, demux->input_period,
+      demux->output_period);
+  gst_adaptive_demux_prepare_streams (demux, FALSE);
+
+  /* Remove all periods except for the input (last) and output (first) period */
+  while (demux->priv->periods->length > 2) {
+    GstAdaptiveDemuxPeriod *period = g_queue_pop_nth (demux->priv->periods, 1);
+    /* Mark all tracks of the removed period as not selected and EOS so they
+     * will be skipped / ignored */
+    for (iter = period->tracks; iter; iter = iter->next) {
+      GstAdaptiveDemuxTrack *track = iter->data;
+      track->selected = FALSE;
+      track->eos = TRUE;
+    }
+    gst_adaptive_demux_period_unref (period);
+  }
+
+  /* Mark all tracks of the output period as EOS so that the output loop
+   * will immediately move to the new period */
+  for (iter = demux->output_period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = iter->data;
+    track->eos = TRUE;
+  }
+
+  /* Go over all slots, and clear any pending track */
+  for (iter = demux->priv->outputs; iter; iter = iter->next) {
+    OutputSlot *slot = (OutputSlot *) iter->data;
+
+    if (slot->pending_track != NULL) {
+      GST_DEBUG_OBJECT (demux,
+          "Removing track '%s' as pending from output of current track '%s'",
+          slot->pending_track->stream_id, slot->track->stream_id);
+      gst_adaptive_demux_track_unref (slot->pending_track);
+      slot->pending_track = NULL;
+    }
+  }
+}
+
+/* must be called with manifest_lock taken */
+gboolean
+gst_adaptive_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+    gint64 * range_start, gint64 * range_stop)
+{
+  GstAdaptiveDemuxClass *klass;
+
+  klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  g_return_val_if_fail (klass->get_live_seek_range, FALSE);
+
+  return klass->get_live_seek_range (demux, range_start, range_stop);
+}
+
+/* must be called with manifest_lock taken */
+gboolean
+gst_adaptive_demux2_stream_in_live_seek_range (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  gint64 range_start, range_stop;
+  if (gst_adaptive_demux_get_live_seek_range (demux, &range_start, &range_stop)) {
+    GST_LOG_OBJECT (stream,
+        "stream position %" GST_TIME_FORMAT "  live seek range %"
+        GST_STIME_FORMAT " - %" GST_STIME_FORMAT,
+        GST_TIME_ARGS (stream->current_position), GST_STIME_ARGS (range_start),
+        GST_STIME_ARGS (range_stop));
+    return (stream->current_position >= range_start
+        && stream->current_position <= range_stop);
+  }
+
+  return FALSE;
+}
+
+/* must be called with manifest_lock taken */
+static gboolean
+gst_adaptive_demux_can_seek (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass;
+
+  klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  if (gst_adaptive_demux_is_live (demux)) {
+    return klass->get_live_seek_range != NULL;
+  }
+
+  return klass->seek != NULL;
+}
+
+static void
+gst_adaptive_demux_setup_streams_for_restart (GstAdaptiveDemux * demux,
+    GstSeekType start_type, GstSeekType stop_type)
+{
+  GList *iter;
+
+  for (iter = demux->input_period->streams; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+
+    /* Make sure the download loop clears and restarts on the next start,
+     * which will recompute the stream segment */
+    g_assert (stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED ||
+        stream->state == GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART);
+    stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART;
+    stream->start_position = 0;
+
+    if (demux->segment.rate > 0 && start_type != GST_SEEK_TYPE_NONE)
+      stream->start_position = demux->segment.start;
+    else if (demux->segment.rate < 0 && stop_type != GST_SEEK_TYPE_NONE)
+      stream->start_position = demux->segment.stop;
+  }
+}
+
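+/* Seek flags that require snapping to a stream-specific position (keyframe or
+ * fragment boundary). Such seeks are first resolved on the stream that
+ * received the seek event, then the snap flags are stripped before the
+ * resulting position is applied to all streams (see the seek handler below) */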
+#define IS_SNAP_SEEK(f) (f & (GST_SEEK_FLAG_SNAP_BEFORE |        \
+                              GST_SEEK_FLAG_SNAP_AFTER |         \
+                              GST_SEEK_FLAG_SNAP_NEAREST |       \
+                             GST_SEEK_FLAG_TRICKMODE_KEY_UNITS | \
+                             GST_SEEK_FLAG_KEY_UNIT))
+#define REMOVE_SNAP_FLAGS(f) (f & ~(GST_SEEK_FLAG_SNAP_BEFORE | \
+                              GST_SEEK_FLAG_SNAP_AFTER | \
+                              GST_SEEK_FLAG_SNAP_NEAREST))
+
+static gboolean
+gst_adaptive_demux_handle_seek_event (GstAdaptiveDemux * demux, GstPad * pad,
+    GstEvent * event)
+{
+  GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  gdouble rate;
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType start_type, stop_type;
+  gint64 start, stop;
+  guint32 seqnum;
+  gboolean update;
+  gboolean ret;
+  GstSegment oldsegment;
+  GstAdaptiveDemux2Stream *stream = NULL;
+  GstEvent *flush_event;
+
+  GST_INFO_OBJECT (demux, "Received seek event");
+
+  GST_API_LOCK (demux);
+
+  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+      &stop_type, &stop);
+
+  if (format != GST_FORMAT_TIME) {
+    GST_API_UNLOCK (demux);
+    GST_WARNING_OBJECT (demux,
+        "Adaptive demuxers only support TIME-based seeking");
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  if (flags & GST_SEEK_FLAG_SEGMENT) {
+    GST_FIXME_OBJECT (demux, "Handle segment seeks");
+    GST_API_UNLOCK (demux);
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  seqnum = gst_event_get_seqnum (event);
+
+  if (!GST_ADAPTIVE_SCHEDULER_LOCK (demux)) {
+    GST_LOG_OBJECT (demux, "Failed to acquire scheduler context");
+    return FALSE;
+  }
+
+  if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
+    /* For instant rate seeks, reply directly and update
+     * our segment so the new rate is reflected in any future
+     * fragments */
+    GstEvent *ev;
+    gdouble rate_multiplier;
+
+    /* instant rate change only supported if direction does not change. All
+     * other requirements are already checked before creating the seek event
+     * but let's double-check here to be sure */
+    if ((demux->segment.rate > 0 && rate < 0) ||
+        (demux->segment.rate < 0 && rate > 0) ||
+        start_type != GST_SEEK_TYPE_NONE ||
+        stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
+      GST_ERROR_OBJECT (demux,
+          "Instant rate change seeks only supported in the "
+          "same direction, without flushing and position change");
+      GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+      GST_API_UNLOCK (demux);
+      return FALSE;
+    }
+
+    rate_multiplier = rate / demux->segment.rate;
+
+    ev = gst_event_new_instant_rate_change (rate_multiplier,
+        (GstSegmentFlags) flags);
+    gst_event_set_seqnum (ev, seqnum);
+
+    ret = gst_adaptive_demux_push_src_event (demux, ev);
+
+    if (ret) {
+      GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+      demux->instant_rate_multiplier = rate_multiplier;
+      GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+    }
+
+    GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+    GST_API_UNLOCK (demux);
+    gst_event_unref (event);
+
+    return ret;
+  }
+
+  if (!gst_adaptive_demux_can_seek (demux)) {
+    GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+
+    GST_API_UNLOCK (demux);
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  /* We can only accept flushing seeks from this point onward */
+  if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+    GST_ERROR_OBJECT (demux,
+        "Non-flushing non-instant-rate seeks are not possible");
+
+    GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+
+    GST_API_UNLOCK (demux);
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  if (gst_adaptive_demux_is_live (demux)) {
+    gint64 range_start, range_stop;
+    gboolean changed = FALSE;
+    gboolean start_valid = TRUE, stop_valid = TRUE;
+
+    if (!gst_adaptive_demux_get_live_seek_range (demux, &range_start,
+            &range_stop)) {
+      GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+      GST_API_UNLOCK (demux);
+      gst_event_unref (event);
+      GST_WARNING_OBJECT (demux, "Failure getting the live seek ranges");
+      return FALSE;
+    }
+
+    GST_DEBUG_OBJECT (demux,
+        "Live range is %" GST_STIME_FORMAT " %" GST_STIME_FORMAT,
+        GST_STIME_ARGS (range_start), GST_STIME_ARGS (range_stop));
+
+    /* Handle relative positioning for live streams (relative to the range_stop) */
+    if (start_type == GST_SEEK_TYPE_END) {
+      start = range_stop + start;
+      start_type = GST_SEEK_TYPE_SET;
+      changed = TRUE;
+    }
+    if (stop_type == GST_SEEK_TYPE_END) {
+      stop = range_stop + stop;
+      stop_type = GST_SEEK_TYPE_SET;
+      changed = TRUE;
+    }
+
+    /* Adjust the requested start/stop position if it falls beyond the live
+     * seek range.
+     * The only case where we don't adjust is the starting point of
+     * an accurate seek (the start when playing forward, the stop when
+     * playing backwards)
+     */
+    if (start_type == GST_SEEK_TYPE_SET && start < range_start &&
+        (rate < 0 || !(flags & GST_SEEK_FLAG_ACCURATE))) {
+      GST_DEBUG_OBJECT (demux,
+          "seek before live stream start, setting to range start: %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (range_start));
+      start = range_start;
+      changed = TRUE;
+    }
+    /* truncate stop position also if set */
+    if (stop_type == GST_SEEK_TYPE_SET && stop > range_stop &&
+        (rate > 0 || !(flags & GST_SEEK_FLAG_ACCURATE))) {
+      GST_DEBUG_OBJECT (demux,
+          "seek stop position beyond the live range end, adjusting to: %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (range_stop));
+      stop = range_stop;
+      changed = TRUE;
+    }
+
+    if (start_type == GST_SEEK_TYPE_SET && GST_CLOCK_TIME_IS_VALID (start) &&
+        (start < range_start || start > range_stop)) {
+      GST_WARNING_OBJECT (demux,
+          "Seek to invalid position start:%" GST_STIME_FORMAT
+          " out of seekable range (%" GST_STIME_FORMAT " - %" GST_STIME_FORMAT
+          ")", GST_STIME_ARGS (start), GST_STIME_ARGS (range_start),
+          GST_STIME_ARGS (range_stop));
+      start_valid = FALSE;
+    }
+    if (stop_type == GST_SEEK_TYPE_SET && GST_CLOCK_TIME_IS_VALID (stop) &&
+        (stop < range_start || stop > range_stop)) {
+      GST_WARNING_OBJECT (demux,
+          "Seek to invalid position stop:%" GST_STIME_FORMAT
+          " out of seekable range (%" GST_STIME_FORMAT " - %" GST_STIME_FORMAT
+          ")", GST_STIME_ARGS (stop), GST_STIME_ARGS (range_start),
+          GST_STIME_ARGS (range_stop));
+      stop_valid = FALSE;
+    }
+
+    /* If the seek position is still outside of the seekable range, refuse the seek */
+    if (!start_valid || !stop_valid) {
+      GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+      GST_API_UNLOCK (demux);
+      gst_event_unref (event);
+      return FALSE;
+    }
+
+    /* Re-create seek event with changed/updated values */
+    if (changed) {
+      gst_event_unref (event);
+      event =
+          gst_event_new_seek (rate, format, flags,
+          start_type, start, stop_type, stop);
+      gst_event_set_seqnum (event, seqnum);
+    }
+  }
+
+  GST_DEBUG_OBJECT (demux, "seek event, %" GST_PTR_FORMAT, event);
+
+  /* have a backup in case seek fails */
+  gst_segment_copy_into (&demux->segment, &oldsegment);
+
+  GST_DEBUG_OBJECT (demux, "sending flush start");
+  flush_event = gst_event_new_flush_start ();
+  gst_event_set_seqnum (flush_event, seqnum);
+
+  gst_adaptive_demux_push_src_event (demux, flush_event);
+
+  gst_adaptive_demux_stop_tasks (demux, FALSE);
+  gst_adaptive_demux_reset_tracks (demux);
+
+  GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+
+  /*
+   * Handle snap seeks as follows:
+   * 1) do the snap seeking on the stream that received
+   *    the event
+   * 2) use the final position on this stream to seek
+   *    on the other streams to the same position
+   *
+   * We can't snap on all streams at the same time as
+   * they might end in different positions, so just
+   * use the one that received the event as the 'leading'
+   * one to do the snap seek.
+   *
+   * FIXME: Could use the global_output_position (running time)
+   * as the snap reference
+   */
+  if (IS_SNAP_SEEK (flags) && demux_class->stream_seek && (stream =
+          gst_adaptive_demux_find_stream_for_pad (demux, pad))) {
+    GstClockTimeDiff ts;
+    GstSeekFlags stream_seek_flags = flags;
+
+    /* snap-seek on the stream that received the event and then
+     * use the resulting position to seek on all streams */
+
+    if (rate >= 0) {
+      if (start_type != GST_SEEK_TYPE_NONE)
+        ts = start;
+      else {
+        ts = stream->current_position;
+        start_type = GST_SEEK_TYPE_SET;
+      }
+    } else {
+      if (stop_type != GST_SEEK_TYPE_NONE)
+        ts = stop;
+      else {
+        stop_type = GST_SEEK_TYPE_SET;
+        ts = stream->current_position;
+      }
+    }
+
+    if (stream) {
+      demux_class->stream_seek (stream, rate >= 0, stream_seek_flags, ts, &ts);
+    }
+
+    /* replace event with a new one without snapping to seek on all streams */
+    gst_event_unref (event);
+    if (rate >= 0) {
+      start = ts;
+    } else {
+      stop = ts;
+    }
+    event =
+        gst_event_new_seek (rate, format, REMOVE_SNAP_FLAGS (flags),
+        start_type, start, stop_type, stop);
+    GST_DEBUG_OBJECT (demux, "Adapted snap seek to %" GST_PTR_FORMAT, event);
+  }
+  stream = NULL;
+
+  ret = gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+      start, stop_type, stop, &update);
+
+  if (ret) {
+    GST_DEBUG_OBJECT (demux, "Calling subclass seek: %" GST_PTR_FORMAT, event);
+
+    ret = demux_class->seek (demux, event);
+  }
+
+  if (!ret) {
+    /* Is there anything else we can do if it fails? */
+    gst_segment_copy_into (&oldsegment, &demux->segment);
+  } else {
+    demux->priv->segment_seqnum = seqnum;
+  }
+  GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+  /* Resetting flow combiner */
+  gst_flow_combiner_reset (demux->priv->flowcombiner);
+
+  GST_DEBUG_OBJECT (demux, "Sending flush stop on all pads");
+  flush_event = gst_event_new_flush_stop (TRUE);
+  gst_event_set_seqnum (flush_event, seqnum);
+  gst_adaptive_demux_push_src_event (demux, flush_event);
+
+  /* If the seek generated a new period, prepare it */
+  if (!demux->input_period->prepared) {
+    /* This can only happen on flushing seeks */
+    g_assert (flags & GST_SEEK_FLAG_FLUSH);
+    gst_adaptive_demux_seek_to_input_period (demux);
+  }
+
+  GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+  GST_DEBUG_OBJECT (demux, "Demuxer segment after seek: %" GST_SEGMENT_FORMAT,
+      &demux->segment);
+  gst_adaptive_demux_setup_streams_for_restart (demux, start_type, stop_type);
+  demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
+
+  /* Reset the global output position (running time) for when the output loop restarts */
+  demux->priv->global_output_position = 0;
+
+  /* After a flushing seek, any instant-rate override is undone */
+  demux->instant_rate_multiplier = 1.0;
+
+  GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+  /* Restart the demux */
+  gst_adaptive_demux_start_tasks (demux);
+
+  GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+  GST_API_UNLOCK (demux);
+  gst_event_unref (event);
+
+  return ret;
+}
+
+/* Returns TRUE if the stream has at least one selected track */
+static gboolean
+gst_adaptive_demux2_stream_has_selected_tracks (GstAdaptiveDemux2Stream *
+    stream)
+{
+  GList *tmp;
+
+  for (tmp = stream->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = tmp->data;
+
+    if (track->selected)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
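+/* Handle a select-streams request: map the requested stream-ids to tracks of
+ * the output period, update each track's selected flag accordingly, and
+ * start/stop the underlying streams as needed. Takes the scheduler context
+ * and TRACKS_LOCK. */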
+static gboolean
+handle_stream_selection (GstAdaptiveDemux * demux, GList * streams,
+    guint32 seqnum)
+{
+  gboolean selection_handled = TRUE;
+  GList *iter;
+  GList *tracks = NULL;
+
+  if (!GST_ADAPTIVE_SCHEDULER_LOCK (demux))
+    return FALSE;
+
+  TRACKS_LOCK (demux);
+  /* Validate the streams and fill:
+   * tracks : list of tracks corresponding to requested streams
+   */
+  for (iter = streams; iter; iter = iter->next) {
+    gchar *stream_id = (gchar *) iter->data;
+    GstAdaptiveDemuxTrack *track;
+
+    GST_DEBUG_OBJECT (demux, "Stream requested : %s", stream_id);
+    track = find_track_for_stream_id (demux->output_period, stream_id);
+    if (!track) {
+      GST_WARNING_OBJECT (demux, "Unrecognized stream_id '%s'", stream_id);
+      selection_handled = FALSE;
+      goto select_streams_done;
+    }
+    tracks = g_list_append (tracks, track);
+    GST_DEBUG_OBJECT (demux, "Track found, selected:%d", track->selected);
+  }
+
+  /* FIXME : ACTIVATING AND DEACTIVATING STREAMS SHOULD BE DONE FROM THE
+   * SCHEDULING THREAD */
+
+  /* FIXME: We want to iterate all streams, mark them as deselected,
+   * then iterate tracks and mark any streams that have at least 1
+   * active output track, then loop over all streams again and start/stop
+   * them as needed */
+
+  /* Go over all tracks present and (de)select based on current selection */
+  for (iter = demux->output_period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) iter->data;
+
+    if (track->selected && !g_list_find (tracks, track)) {
+      GST_DEBUG_OBJECT (demux, "De-select track '%s' (active:%d)",
+          track->stream_id, track->active);
+      track->selected = FALSE;
+      track->draining = TRUE;
+    } else if (!track->selected && g_list_find (tracks, track)) {
+      GST_DEBUG_OBJECT (demux, "Selecting track '%s'", track->stream_id);
+
+      track->selected = TRUE;
+    }
+  }
+
+  /* Start or stop streams based on the updated track selection */
+  for (iter = demux->output_period->streams; iter; iter = iter->next) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+    GList *trackiter;
+
+    gboolean is_running = gst_adaptive_demux2_stream_is_running (stream);
+    gboolean should_be_running =
+        gst_adaptive_demux2_stream_has_selected_tracks (stream);
+
+    if (!is_running && should_be_running) {
+      GstClockTime output_running_ts = demux->priv->global_output_position;
+      GstClockTime start_position;
+
+      /* Calculate where we should start the stream, and then
+       * start it. */
+      GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+
+      GST_DEBUG_OBJECT (stream, "(Re)starting stream. Output running ts %"
+          GST_TIME_FORMAT " in demux segment %" GST_SEGMENT_FORMAT,
+          GST_TIME_ARGS (output_running_ts), &demux->segment);
+
+      start_position =
+          gst_segment_position_from_running_time (&demux->segment,
+          GST_FORMAT_TIME, output_running_ts);
+
+      GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+      GST_DEBUG_OBJECT (demux, "Setting stream start position to %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (start_position));
+
+      stream->current_position = stream->start_position = start_position;
+      stream->compute_segment = TRUE;
+
+      /* If output has already begun, ensure we seek this segment
+       * to the correct restart position when the download loop begins */
+      if (output_running_ts != 0)
+        stream->state = GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART;
+
+      /* Activate track pads for this stream */
+      for (trackiter = stream->tracks; trackiter; trackiter = trackiter->next) {
+        GstAdaptiveDemuxTrack *track =
+            (GstAdaptiveDemuxTrack *) trackiter->data;
+        gst_pad_set_active (track->sinkpad, TRUE);
+      }
+
+      gst_adaptive_demux2_stream_start (stream);
+    } else if (is_running && !should_be_running) {
+      /* Stream should not be running and needs stopping */
+      gst_adaptive_demux2_stream_stop (stream);
+
+      /* Set all track sinkpads to inactive for this stream */
+      for (trackiter = stream->tracks; trackiter; trackiter = trackiter->next) {
+        GstAdaptiveDemuxTrack *track =
+            (GstAdaptiveDemuxTrack *) trackiter->data;
+        gst_pad_set_active (track->sinkpad, FALSE);
+      }
+    }
+  }
+
+  g_atomic_int_set (&demux->priv->requested_selection_seqnum, seqnum);
+
+select_streams_done:
+  demux_update_buffering_locked (demux);
+  demux_post_buffering_locked (demux);
+
+  TRACKS_UNLOCK (demux);
+  GST_ADAPTIVE_SCHEDULER_UNLOCK (demux);
+
+  if (tracks)
+    g_list_free (tracks);
+  return selection_handled;
+}
+
+static gboolean
+gst_adaptive_demux_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstAdaptiveDemux *demux;
+
+  demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+
+  switch (event->type) {
+    case GST_EVENT_SEEK:
+    {
+      guint32 seqnum = gst_event_get_seqnum (event);
+      if (seqnum == demux->priv->segment_seqnum) {
+        GST_LOG_OBJECT (pad,
+            "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
+        gst_event_unref (event);
+        return TRUE;
+      }
+      return gst_adaptive_demux_handle_seek_event (demux, pad, event);
+    }
+    case GST_EVENT_LATENCY:{
+      /* Upstream and our internal source are irrelevant
+       * for latency, and we should not make latency
+       * configuration fail here, so just accept the event */
+      gst_event_unref (event);
+      return TRUE;
+    }
+    case GST_EVENT_QOS:{
+      GstClockTimeDiff diff;
+      GstClockTime timestamp;
+      GstClockTime earliest_time;
+
+      gst_event_parse_qos (event, NULL, NULL, &diff, &timestamp);
+      /* Only take into account lateness if late */
+      if (diff > 0)
+        earliest_time = timestamp + 2 * diff;
+      else
+        earliest_time = timestamp;
+
+      GST_OBJECT_LOCK (demux);
+      if (!GST_CLOCK_TIME_IS_VALID (demux->priv->qos_earliest_time) ||
+          earliest_time > demux->priv->qos_earliest_time) {
+        demux->priv->qos_earliest_time = earliest_time;
+        GST_DEBUG_OBJECT (demux, "qos_earliest_time %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (demux->priv->qos_earliest_time));
+      }
+      GST_OBJECT_UNLOCK (demux);
+      break;
+    }
+    case GST_EVENT_SELECT_STREAMS:
+    {
+      GList *streams;
+      gboolean selection_handled;
+
+      if (GST_EVENT_SEQNUM (event) ==
+          g_atomic_int_get (&demux->priv->requested_selection_seqnum)) {
+        GST_DEBUG_OBJECT (demux, "Already handled/handling select-streams %d",
+            GST_EVENT_SEQNUM (event));
+        return TRUE;
+      }
+
+      gst_event_parse_select_streams (event, &streams);
+      selection_handled =
+          handle_stream_selection (demux, streams, GST_EVENT_SEQNUM (event));
+      g_list_free_full (streams, g_free);
+      return selection_handled;
+    }
+    default:
+      break;
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+static gboolean
+gst_adaptive_demux_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (parent);
+  gboolean ret = FALSE;
+
+  if (query == NULL)
+    return FALSE;
+
+
+  switch (query->type) {
+    case GST_QUERY_DURATION:{
+      GstFormat fmt;
+      GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+      gst_query_parse_duration (query, &fmt, NULL);
+
+      if (gst_adaptive_demux_is_live (demux)) {
+        /* We are able to answer this query: the duration is unknown */
+        gst_query_set_duration (query, fmt, -1);
+        ret = TRUE;
+        break;
+      }
+
+      if (fmt == GST_FORMAT_TIME
+          && g_atomic_int_get (&demux->priv->have_manifest)) {
+
+        GST_MANIFEST_LOCK (demux);
+        duration = demux->priv->duration;
+        GST_MANIFEST_UNLOCK (demux);
+
+        if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0) {
+          gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+          ret = TRUE;
+        }
+      }
+
+      GST_LOG_OBJECT (demux, "GST_QUERY_DURATION returns %s with duration %"
+          GST_TIME_FORMAT, ret ? "TRUE" : "FALSE", GST_TIME_ARGS (duration));
+      break;
+    }
+    case GST_QUERY_LATENCY:{
+      gst_query_set_latency (query, FALSE, 0, -1);
+      ret = TRUE;
+      break;
+    }
+    case GST_QUERY_SEEKING:{
+      GstFormat fmt;
+      gint64 stop = -1;
+      gint64 start = 0;
+
+      if (!g_atomic_int_get (&demux->priv->have_manifest)) {
+        GST_INFO_OBJECT (demux,
+            "Don't have manifest yet, can't answer seeking query");
+        return FALSE;           /* can't answer without manifest */
+      }
+
+      GST_MANIFEST_LOCK (demux);
+
+      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+      GST_INFO_OBJECT (demux, "Received GST_QUERY_SEEKING with format %d", fmt);
+      if (fmt == GST_FORMAT_TIME) {
+        GstClockTime duration;
+        gboolean can_seek = gst_adaptive_demux_can_seek (demux);
+
+        ret = TRUE;
+        if (can_seek) {
+          if (gst_adaptive_demux_is_live (demux)) {
+            ret = gst_adaptive_demux_get_live_seek_range (demux, &start, &stop);
+            if (!ret) {
+              GST_MANIFEST_UNLOCK (demux);
+              GST_INFO_OBJECT (demux, "can't answer seeking query");
+              return FALSE;
+            }
+          } else {
+            duration = demux->priv->duration;
+            if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0)
+              stop = duration;
+          }
+        }
+        gst_query_set_seeking (query, fmt, can_seek, start, stop);
+        GST_INFO_OBJECT (demux, "GST_QUERY_SEEKING returning with start : %"
+            GST_TIME_FORMAT ", stop : %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+      }
+      GST_MANIFEST_UNLOCK (demux);
+      break;
+    }
+    case GST_QUERY_URI:
+
+      GST_MANIFEST_LOCK (demux);
+
+      /* TODO HLS can answer this differently it seems */
+      if (demux->manifest_uri) {
+        /* FIXME: (hls) Do we answer with the variant playlist, with the current
+         * playlist or the URI of the last downloaded fragment? */
+        gst_query_set_uri (query, demux->manifest_uri);
+        ret = TRUE;
+      }
+
+      GST_MANIFEST_UNLOCK (demux);
+      break;
+    case GST_QUERY_SELECTABLE:
+      gst_query_set_selectable (query, TRUE);
+      ret = TRUE;
+      break;
+    default:
+      /* Don't forward queries upstream because of the special nature of this
+       * "demuxer", which only relies on the upstream element to feed it
+       * the manifest
+       */
+      break;
+  }
+
+  return ret;
+}
+
+/* Called when the scheduler starts, to kick off manifest updates
+ * and stream downloads */
+static gboolean
+gst_adaptive_demux_scheduler_start_cb (GstAdaptiveDemux * demux)
+{
+  GList *iter;
+
+  GST_INFO_OBJECT (demux, "Starting streams' tasks");
+
+  iter = demux->input_period->streams;
+
+  for (; iter; iter = g_list_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = iter->data;
+
+    /* If we need to process this stream to discover tracks *OR* it has any
+     * tracks which are selected, start it now */
+    if ((stream->pending_tracks == TRUE)
+        || gst_adaptive_demux2_stream_is_selected_locked (stream))
+      gst_adaptive_demux2_stream_start (stream);
+  }
+
+  return FALSE;
+}
+
+/* must be called with manifest_lock taken */
+static void
+gst_adaptive_demux_start_tasks (GstAdaptiveDemux * demux)
+{
+  if (!gst_adaptive_demux2_is_running (demux)) {
+    GST_DEBUG_OBJECT (demux, "Not starting tasks due to shutdown");
+    return;
+  }
+
+  GST_DEBUG_OBJECT (demux, "Starting the SCHEDULER task");
+  gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+      (GSourceFunc) gst_adaptive_demux_scheduler_start_cb, demux, NULL);
+
+  TRACKS_LOCK (demux);
+  demux->priv->flushing = FALSE;
+  GST_DEBUG_OBJECT (demux, "Starting the output task");
+  gst_task_start (demux->priv->output_task);
+  TRACKS_UNLOCK (demux);
+}
+
+/* must be called with manifest_lock taken */
+static void
+gst_adaptive_demux_stop_manifest_update_task (GstAdaptiveDemux * demux)
+{
+  GST_DEBUG_OBJECT (demux, "requesting stop of the manifest update task");
+  if (demux->priv->manifest_updates_cb != 0) {
+    gst_adaptive_demux_loop_cancel_call (demux->priv->scheduler_task,
+        demux->priv->manifest_updates_cb);
+    demux->priv->manifest_updates_cb = 0;
+  }
+}
+
+static gboolean gst_adaptive_demux_updates_start_cb (GstAdaptiveDemux * demux);
+
+/* must be called with manifest_lock taken */
+static void
+gst_adaptive_demux_start_manifest_update_task (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *demux_class = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (gst_adaptive_demux_is_live (demux)) {
+    /* Task to periodically update the manifest */
+    if (demux_class->requires_periodical_playlist_update (demux)) {
+      GST_DEBUG_OBJECT (demux, "requesting start of the manifest update task");
+      if (demux->priv->manifest_updates_cb == 0) {
+        demux->priv->manifest_updates_cb =
+            gst_adaptive_demux_loop_call (demux->priv->scheduler_task,
+            (GSourceFunc) gst_adaptive_demux_updates_start_cb, demux, NULL);
+      }
+    }
+  }
+}
+
+/* must be called with manifest_lock taken
+ * This function will temporarily release manifest_lock in order to join the
+ * download threads.
+ * The api_lock will still protect it against other threads trying to modify
+ * the demux element.
+ */
+static void
+gst_adaptive_demux_stop_tasks (GstAdaptiveDemux * demux, gboolean stop_updates)
+{
+  GST_LOG_OBJECT (demux, "Stopping tasks");
+
+  if (stop_updates)
+    gst_adaptive_demux_stop_manifest_update_task (demux);
+
+  TRACKS_LOCK (demux);
+  demux->priv->flushing = TRUE;
+  g_cond_signal (&demux->priv->tracks_add);
+  gst_task_stop (demux->priv->output_task);
+  TRACKS_UNLOCK (demux);
+
+  gst_task_join (demux->priv->output_task);
+
+  if (demux->input_period)
+    gst_adaptive_demux_period_stop_tasks (demux->input_period);
+
+  demux->priv->qos_earliest_time = GST_CLOCK_TIME_NONE;
+}
+
+/* must be called with manifest_lock taken */
+static gboolean
+gst_adaptive_demux_push_src_event (GstAdaptiveDemux * demux, GstEvent * event)
+{
+  GList *iter;
+  gboolean ret = TRUE;
+
+  GST_DEBUG_OBJECT (demux, "event %" GST_PTR_FORMAT, event);
+
+  TRACKS_LOCK (demux);
+  for (iter = demux->priv->outputs; iter; iter = g_list_next (iter)) {
+    OutputSlot *slot = (OutputSlot *) iter->data;
+    gst_event_ref (event);
+    GST_DEBUG_OBJECT (slot->pad, "Pushing event");
+    ret = ret & gst_pad_push_event (slot->pad, event);
+    if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+      slot->pushed_timed_data = FALSE;
+  }
+  TRACKS_UNLOCK (demux);
+  gst_event_unref (event);
+  return ret;
+}
+
+/* must be called with manifest_lock taken */
+void
+gst_adaptive_demux2_stream_set_caps (GstAdaptiveDemux2Stream * stream,
+    GstCaps * caps)
+{
+  GST_DEBUG_OBJECT (stream,
+      "setting new caps for stream %" GST_PTR_FORMAT, caps);
+  gst_caps_replace (&stream->pending_caps, caps);
+  gst_caps_unref (caps);
+}
+
+/* must be called with manifest_lock taken */
+void
+gst_adaptive_demux2_stream_set_tags (GstAdaptiveDemux2Stream * stream,
+    GstTagList * tags)
+{
+  GST_DEBUG_OBJECT (stream,
+      "setting new tags for stream %" GST_PTR_FORMAT, tags);
+  if (stream->pending_tags) {
+    gst_tag_list_unref (stream->pending_tags);
+  }
+  stream->pending_tags = tags;
+}
+
+/* must be called with manifest_lock taken */
+void
+gst_adaptive_demux2_stream_queue_event (GstAdaptiveDemux2Stream * stream,
+    GstEvent * event)
+{
+  stream->pending_events = g_list_append (stream->pending_events, event);
+}
+
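+/* Update the moving average of fragment bitrates: the last
+ * NUM_LOOKBACK_FRAGMENTS values are kept in a circular buffer
+ * (fragment_bitrates) with moving_bitrate as the running sum, so the
+ * average is updated in constant time per fragment */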
+static guint64
+_update_average_bitrate (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, guint64 new_bitrate)
+{
+  gint index = stream->moving_index % NUM_LOOKBACK_FRAGMENTS;
+
+  stream->moving_bitrate -= stream->fragment_bitrates[index];
+  stream->fragment_bitrates[index] = new_bitrate;
+  stream->moving_bitrate += new_bitrate;
+
+  stream->moving_index += 1;
+
+  if (stream->moving_index > NUM_LOOKBACK_FRAGMENTS)
+    return stream->moving_bitrate / NUM_LOOKBACK_FRAGMENTS;
+  return stream->moving_bitrate / stream->moving_index;
+}
+
+static guint64
+gst_adaptive_demux2_stream_update_current_bitrate (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  guint64 average_bitrate;
+  guint64 fragment_bitrate;
+  guint connection_speed, min_bitrate, max_bitrate, target_download_rate;
+
+  fragment_bitrate = stream->last_bitrate;
+  GST_DEBUG_OBJECT (stream, "Download bitrate is : %" G_GUINT64_FORMAT " bps",
+      fragment_bitrate);
+
+  average_bitrate = _update_average_bitrate (demux, stream, fragment_bitrate);
+
+  GST_INFO_OBJECT (stream,
+      "last fragment bitrate was %" G_GUINT64_FORMAT, fragment_bitrate);
+  GST_INFO_OBJECT (stream,
+      "Last %u fragments average bitrate is %" G_GUINT64_FORMAT,
+      NUM_LOOKBACK_FRAGMENTS, average_bitrate);
+
+  /* Conservative approach, make sure we don't upgrade too fast */
+  GST_OBJECT_LOCK (demux);
+  stream->current_download_rate = MIN (average_bitrate, fragment_bitrate);
+
+  /* If this is the/a video stream update the overall demuxer
+   * reported bitrate and notify, to give the application a
+   * chance to choose a new connection-bitrate */
+  if ((stream->stream_type & GST_STREAM_TYPE_VIDEO) != 0) {
+    demux->current_download_rate = stream->current_download_rate;
+    GST_OBJECT_UNLOCK (demux);
+    g_object_notify (G_OBJECT (demux), "current-bandwidth");
+    GST_OBJECT_LOCK (demux);
+  }
+
+  connection_speed = demux->connection_speed;
+  min_bitrate = demux->min_bitrate;
+  max_bitrate = demux->max_bitrate;
+  GST_OBJECT_UNLOCK (demux);
+
+  if (connection_speed) {
+    GST_LOG_OBJECT (stream, "connection-speed is set to %u kbps, using it",
+        connection_speed / 1000);
+    return connection_speed;
+  }
+
+  /* No explicit connection_speed, so choose the new variant to use as a
+   * fraction of the measured download rate */
+  target_download_rate =
+      CLAMP (stream->current_download_rate, 0,
+      G_MAXUINT) * demux->bandwidth_target_ratio;
+
+  GST_DEBUG_OBJECT (stream, "Bitrate after target ratio limit (%0.2f): %u",
+      demux->bandwidth_target_ratio, target_download_rate);
+
+#if 0
+  /* Debugging code, modulate the bitrate every few fragments */
+  {
+    static guint ctr = 0;
+    if (ctr % 3 == 0) {
+      GST_INFO_OBJECT (stream, "Halving reported bitrate for debugging");
+      target_download_rate /= 2;
+    }
+    ctr++;
+  }
+#endif
+
+  if (min_bitrate > 0 && target_download_rate < min_bitrate) {
+    target_download_rate = min_bitrate;
+    GST_LOG_OBJECT (stream, "Bitrate adjusted due to min-bitrate : %u bits/s",
+        min_bitrate);
+  }
+
+  if (max_bitrate > 0 && target_download_rate > max_bitrate) {
+    target_download_rate = max_bitrate;
+    GST_LOG_OBJECT (stream, "Bitrate adjusted due to max-bitrate : %u bits/s",
+        max_bitrate);
+  }
+
+  GST_DEBUG_OBJECT (stream, "Returning target download rate of %u bps",
+      target_download_rate);
+
+  return target_download_rate;
+}
+
+/* must be called with manifest_lock taken */
+static GstFlowReturn
+gst_adaptive_demux2_stream_finish_fragment_default (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  /* No need to advance, this isn't a real fragment */
+  if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
+    return GST_FLOW_OK;
+
+  return gst_adaptive_demux2_stream_advance_fragment (demux, stream,
+      stream->fragment.duration);
+}
+
+/* must be called with manifest_lock taken.
+ * Can temporarily release manifest_lock
+ */
+static GstFlowReturn
+gst_adaptive_demux2_stream_data_received_default (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer)
+{
+  return gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+}
+
+static gboolean
+gst_adaptive_demux_requires_periodical_playlist_update_default (GstAdaptiveDemux
+    * demux)
+{
+  return TRUE;
+}
+
+/* Called when a stream needs waking after the manifest is updated */
+void
+gst_adaptive_demux2_stream_wants_manifest_update (GstAdaptiveDemux * demux)
+{
+  demux->priv->stream_waiting_for_manifest = TRUE;
+}
+
+static gboolean
+gst_adaptive_demux_manifest_update_cb (GstAdaptiveDemux * demux)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  gboolean schedule_again = TRUE;
+
+  GST_MANIFEST_LOCK (demux);
+  demux->priv->manifest_updates_cb = 0;
+
+  /* Updating the playlist is only needed for live playlists */
+  if (!gst_adaptive_demux_is_live (demux)) {
+    GST_MANIFEST_UNLOCK (demux);
+    return G_SOURCE_REMOVE;
+  }
+
+  GST_DEBUG_OBJECT (demux, "Updating playlist");
+  ret = gst_adaptive_demux_update_manifest (demux);
+
+  if (ret == GST_FLOW_EOS) {
+    GST_MANIFEST_UNLOCK (demux);
+    return G_SOURCE_REMOVE;
+  }
+
+  if (ret == GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (demux, "Updated playlist successfully");
+    demux->priv->update_failed_count = 0;
+
+    /* Wake up download tasks */
+    if (demux->priv->stream_waiting_for_manifest) {
+      GList *iter;
+
+      for (iter = demux->input_period->streams; iter; iter = g_list_next (iter)) {
+        GstAdaptiveDemux2Stream *stream = iter->data;
+        gst_adaptive_demux2_stream_on_manifest_update (stream);
+      }
+      demux->priv->stream_waiting_for_manifest = FALSE;
+    }
+  } else {
+    demux->priv->update_failed_count++;
+
+    if (demux->priv->update_failed_count <= DEFAULT_FAILED_COUNT) {
+      GST_WARNING_OBJECT (demux, "Could not update the playlist, flow: %s",
+          gst_flow_get_name (ret));
+    } else {
+      GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+          (_("Internal data stream error.")), ("Could not update playlist"));
+      GST_DEBUG_OBJECT (demux, "Stopped manifest updates because of error");
+      schedule_again = FALSE;
+    }
+  }
+
+  if (schedule_again) {
+    GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+    demux->priv->manifest_updates_cb =
+        gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task,
+        klass->get_manifest_update_interval (demux) * GST_USECOND,
+        (GSourceFunc) gst_adaptive_demux_manifest_update_cb, demux, NULL);
+  }
+
+  GST_MANIFEST_UNLOCK (demux);
+
+  return G_SOURCE_REMOVE;
+}
+
+static gboolean
+gst_adaptive_demux_updates_start_cb (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  /* Loop for updating the playlist. This periodically checks whether
+   * the playlist needs to be updated, updates it, and then signals the
+   * streaming thread in case it can continue downloading now. */
+
+  /* block until the next scheduled update or the signal to quit this thread */
+  GST_DEBUG_OBJECT (demux, "Started updates task");
+  demux->priv->manifest_updates_cb =
+      gst_adaptive_demux_loop_call_delayed (demux->priv->scheduler_task,
+      klass->get_manifest_update_interval (demux) * GST_USECOND,
+      (GSourceFunc) gst_adaptive_demux_manifest_update_cb, demux, NULL);
+
+  return G_SOURCE_REMOVE;
+}
+
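+/* TRACKS_LOCK held (called from the selection update handling below).
+ * Find an existing output slot of the same stream type that @track can
+ * replace: either a slot that already has @track pending, or a free slot
+ * whose current track is draining. Returns NULL if no slot qualifies. */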
+static OutputSlot *
+find_replacement_slot_for_track (GstAdaptiveDemux * demux,
+    GstAdaptiveDemuxTrack * track)
+{
+  GList *tmp;
+
+  for (tmp = demux->priv->outputs; tmp; tmp = tmp->next) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+    /* Incompatible output type */
+    if (slot->type != track->type)
+      continue;
+
+    /* Slot which is already assigned to this pending track */
+    if (slot->pending_track == track)
+      return slot;
+
+    /* slot already used for another pending track */
+    if (slot->pending_track != NULL)
+      continue;
+
+    /* Current output track is of the same type and is draining */
+    if (slot->track && slot->track->draining)
+      return slot;
+  }
+
+  return NULL;
+}
+
+/* TRACKS_LOCK taken */
+static OutputSlot *
+find_slot_for_track (GstAdaptiveDemux * demux, GstAdaptiveDemuxTrack * track)
+{
+  GList *tmp;
+
+  for (tmp = demux->priv->outputs; tmp; tmp = tmp->next) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+
+    if (slot->track == track)
+      return slot;
+  }
+
+  return NULL;
+}
+
+/* TRACKS_LOCK held */
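+/* Returns a streams-selected message (carrying @seqnum) listing all active
+ * tracks of the output period if every selected track is active, otherwise
+ * returns NULL */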
+static GstMessage *
+all_selected_tracks_are_active (GstAdaptiveDemux * demux, guint32 seqnum)
+{
+  GList *tmp;
+  GstMessage *msg;
+
+  for (tmp = demux->output_period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+
+    if (track->selected && !track->active)
+      return NULL;
+  }
+
+  /* All selected tracks are active, create the message */
+  msg =
+      gst_message_new_streams_selected (GST_OBJECT (demux),
+      demux->output_period->collection);
+  GST_MESSAGE_SEQNUM (msg) = seqnum;
+  for (tmp = demux->output_period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+    if (track->active) {
+      gst_message_streams_selected_add (msg, track->stream_object);
+    }
+  }
+
+  return msg;
+}
+
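+/* Push the initial stream-start and stream-collection events on a freshly
+ * (re)assigned output slot, and mark all of the track's sticky events for
+ * re-delivery */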
+static void
+gst_adaptive_demux_send_initial_events (GstAdaptiveDemux * demux,
+    OutputSlot * slot)
+{
+  GstAdaptiveDemuxTrack *track = slot->track;
+  GstEvent *event;
+
+  /* Send EVENT_STREAM_START */
+  event = gst_event_new_stream_start (track->stream_id);
+  if (demux->have_group_id)
+    gst_event_set_group_id (event, demux->group_id);
+  gst_event_set_stream_flags (event, track->flags);
+  gst_event_set_stream (event, track->stream_object);
+  GST_DEBUG_OBJECT (demux, "Sending stream-start for track '%s'",
+      track->stream_id);
+  gst_pad_push_event (slot->pad, event);
+
+  /* Send EVENT_STREAM_COLLECTION */
+  event = gst_event_new_stream_collection (demux->output_period->collection);
+  GST_DEBUG_OBJECT (demux, "Sending stream-collection for track '%s'",
+      track->stream_id);
+  gst_pad_push_event (slot->pad, event);
+
+  /* Mark all sticky events for re-sending */
+  gst_event_store_mark_all_undelivered (&track->sticky_events);
+}
+
+/*
+ * Called with TRACKS_LOCK taken
+ */
+static void
+check_and_handle_selection_update_locked (GstAdaptiveDemux * demux)
+{
+  GList *tmp;
+  guint requested_selection_seqnum;
+  GstMessage *msg;
+
+  /* If requested_selection_seqnum != current_selection_seqnum, re-check all
+     output slots vs active/draining tracks */
+  requested_selection_seqnum =
+      g_atomic_int_get (&demux->priv->requested_selection_seqnum);
+
+  if (requested_selection_seqnum == demux->priv->current_selection_seqnum)
+    return;
+
+  GST_DEBUG_OBJECT (demux, "Selection changed, re-checking all output slots");
+
+  /* Go over all slots, and if they have a pending track that's no longer
+   * selected, clear it so the slot can be reused */
+  for (tmp = demux->priv->outputs; tmp; tmp = tmp->next) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+
+    if (slot->pending_track != NULL && !slot->pending_track->selected) {
+      GST_DEBUG_OBJECT (demux,
+          "Removing deselected track '%s' as pending from output of current track '%s'",
+          slot->pending_track->stream_id, slot->track->stream_id);
+      gst_adaptive_demux_track_unref (slot->pending_track);
+      slot->pending_track = NULL;
+    }
+  }
+
+  /* Go over all tracks and create/re-assign/remove slots */
+  for (tmp = demux->output_period->tracks; tmp; tmp = tmp->next) {
+    GstAdaptiveDemuxTrack *track = (GstAdaptiveDemuxTrack *) tmp->data;
+
+    if (track->selected) {
+      OutputSlot *slot = find_slot_for_track (demux, track);
+
+      /* 0. Track is selected and has a slot. Nothing to do */
+      if (slot) {
+        GST_DEBUG_OBJECT (demux, "Track '%s' is already being outputted",
+            track->stream_id);
+        continue;
+      }
+
+      slot = find_replacement_slot_for_track (demux, track);
+      if (slot) {
+        /* 1. There is an existing slot of the same type which is currently
+         *    draining, assign this track as a replacement for it */
+        g_assert (slot->pending_track == NULL || slot->pending_track == track);
+        if (slot->pending_track == NULL) {
+          slot->pending_track = gst_adaptive_demux_track_ref (track);
+          GST_DEBUG_OBJECT (demux,
+              "Track '%s' will be used on output of track '%s'",
+              track->stream_id, slot->track->stream_id);
+        }
+      } else {
+        /* 2. There is no compatible replacement slot, create a new one */
+        slot = gst_adaptive_demux_output_slot_new (demux, track->type);
+        GST_DEBUG_OBJECT (demux, "Created slot for track '%s'",
+            track->stream_id);
+        demux->priv->outputs = g_list_append (demux->priv->outputs, slot);
+
+        track->update_next_segment = TRUE;
+
+        slot->track = gst_adaptive_demux_track_ref (track);
+        track->active = TRUE;
+        gst_adaptive_demux_send_initial_events (demux, slot);
+      }
+
+      /* If we were draining this track, we no longer are */
+      track->draining = FALSE;
+    }
+  }
+
+  /* Finally, check that all slots have a current/pending track. If not, remove the slot */
+  for (tmp = demux->priv->outputs; tmp;) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+    /* We should never have slots without a target track */
+    g_assert (slot->track);
+    if (slot->track->draining && !slot->pending_track) {
+      GstAdaptiveDemux2Stream *stream;
+
+      GST_DEBUG_OBJECT (demux, "Output for track '%s' is no longer used",
+          slot->track->stream_id);
+      slot->track->active = FALSE;
+
+      /* If the stream feeding this track is stopped, flush and clear
+       * the track now that it's going inactive. If the stream was not
+       * found, it means we advanced past that period already (and the
+       * stream was stopped and discarded) */
+      stream = find_stream_for_track_locked (demux, slot->track);
+      if (stream != NULL && !gst_adaptive_demux2_stream_is_running (stream))
+        gst_adaptive_demux_track_flush (slot->track);
+
+      tmp = demux->priv->outputs = g_list_remove (demux->priv->outputs, slot);
+      gst_adaptive_demux_output_slot_free (demux, slot);
+    } else
+      tmp = tmp->next;
+  }
+
+  demux->priv->current_selection_seqnum = requested_selection_seqnum;
+  msg = all_selected_tracks_are_active (demux, requested_selection_seqnum);
+  if (msg) {
+    TRACKS_UNLOCK (demux);
+    GST_DEBUG_OBJECT (demux, "Posting streams-selected");
+    gst_element_post_message (GST_ELEMENT_CAST (demux), msg);
+    TRACKS_LOCK (demux);
+  }
+}
+
+/* TRACKS_LOCK held */
+static gboolean
+gst_adaptive_demux_advance_output_period (GstAdaptiveDemux * demux)
+{
+  GList *iter;
+  GstAdaptiveDemuxPeriod *previous_period;
+  GstStreamCollection *collection;
+
+  /* Grab the next period, should be demux->periods->next->data */
+  previous_period = g_queue_pop_head (demux->priv->periods);
+
+  /* Remove ref held by demux->output_period */
+  gst_adaptive_demux_period_unref (previous_period);
+  demux->output_period =
+      gst_adaptive_demux_period_ref (g_queue_peek_head (demux->priv->periods));
+
+  GST_DEBUG_OBJECT (demux, "Moved output to period %d",
+      demux->output_period->period_num);
+
+  /* We can now post the collection of the new period */
+  collection = demux->output_period->collection;
+  TRACKS_UNLOCK (demux);
+  gst_element_post_message (GST_ELEMENT_CAST (demux),
+      gst_message_new_stream_collection (GST_OBJECT (demux), collection));
+  TRACKS_LOCK (demux);
+
+  /* Unselect all tracks of the previous period */
+  for (iter = previous_period->tracks; iter; iter = iter->next) {
+    GstAdaptiveDemuxTrack *track = iter->data;
+    if (track->selected) {
+      track->selected = FALSE;
+      track->draining = TRUE;
+    }
+  }
+
+  /* Force a selection re-check */
+  g_atomic_int_inc (&demux->priv->requested_selection_seqnum);
+  check_and_handle_selection_update_locked (demux);
+
+  /* Remove the final ref on the previous period now that we have done the switch */
+  gst_adaptive_demux_period_unref (previous_period);
+
+  return TRUE;
+}
+
+/* Called with TRACKS_LOCK taken */
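+/* Switch @slot from its draining track to the pending replacement track once
+ * the replacement has buffered up to its threshold (or is EOS), or once the
+ * draining track has been fully drained */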
+static void
+handle_slot_pending_track_switch_locked (GstAdaptiveDemux * demux,
+    OutputSlot * slot)
+{
+  GstAdaptiveDemuxTrack *track = slot->track;
+  GstMessage *msg;
+  gboolean pending_is_ready;
+  GstAdaptiveDemux2Stream *stream;
+
+  /* If we have a pending track for this slot, the current track should be
+   * draining and no longer selected */
+  g_assert (track->draining && !track->selected);
+
+  /* If we're draining, check if the pending track has enough data *or* that
+     we've already drained out entirely */
+  pending_is_ready =
+      (slot->pending_track->level_time >=
+      slot->pending_track->buffering_threshold);
+  pending_is_ready |= slot->pending_track->eos;
+
+  if (!pending_is_ready && gst_queue_array_get_length (track->queue) > 0) {
+    GST_DEBUG_OBJECT (demux,
+        "Replacement track '%s' doesn't have enough data for switching yet",
+        slot->pending_track->stream_id);
+    return;
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "Pending replacement track has enough data, switching");
+  track->active = FALSE;
+  track->draining = FALSE;
+
+  /* If the stream feeding this track is stopped, flush and clear
+   * the track now that it's going inactive. If the stream was not
+   * found, it means we advanced past that period already (and the
+   * stream was stopped and discarded) */
+  stream = find_stream_for_track_locked (demux, track);
+  if (stream != NULL && !gst_adaptive_demux2_stream_is_running (stream))
+    gst_adaptive_demux_track_flush (track);
+
+  gst_adaptive_demux_track_unref (track);
+  /* We steal the reference of pending_track */
+  track = slot->track = slot->pending_track;
+  slot->pending_track = NULL;
+  slot->track->active = TRUE;
+
+  /* Make sure the track segment will start at the current position */
+  track->update_next_segment = TRUE;
+
+  /* Send stream start and collection, and schedule sticky events */
+  gst_adaptive_demux_send_initial_events (demux, slot);
+
+  /* Can we emit the streams-selected message now ? */
+  msg =
+      all_selected_tracks_are_active (demux,
+      g_atomic_int_get (&demux->priv->requested_selection_seqnum));
+  if (msg) {
+    TRACKS_UNLOCK (demux);
+    GST_DEBUG_OBJECT (demux, "Posting streams-selected");
+    gst_element_post_message (GST_ELEMENT_CAST (demux), msg);
+    TRACKS_LOCK (demux);
+  }
+
+}
+
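+/* Output loop, running on priv->output_task (takes TRACKS_LOCK).
+ * It computes the global output position (the minimum pending position over
+ * all active tracks), then dequeues and pushes each track's data up to that
+ * position, handling pending track switches and period changes on the way */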
+static void
+gst_adaptive_demux_output_loop (GstAdaptiveDemux * demux)
+{
+  GList *tmp;
+  GstClockTimeDiff global_output_position = GST_CLOCK_STIME_NONE;
+  gboolean wait_for_data = FALSE;
+  GstFlowReturn ret;
+
+  GST_DEBUG_OBJECT (demux, "enter");
+
+  TRACKS_LOCK (demux);
+
+  /* Check if stopping */
+  if (demux->priv->flushing) {
+    ret = GST_FLOW_FLUSHING;
+    goto pause;
+  }
+
+  /* If the selection changed, handle it */
+  check_and_handle_selection_update_locked (demux);
+
+restart:
+  ret = GST_FLOW_OK;
+  global_output_position = GST_CLOCK_STIME_NONE;
+  if (wait_for_data) {
+    GST_DEBUG_OBJECT (demux, "Waiting for data");
+    g_cond_wait (&demux->priv->tracks_add, &demux->priv->tracks_lock);
+    GST_DEBUG_OBJECT (demux, "Done waiting for data");
+    if (demux->priv->flushing) {
+      ret = GST_FLOW_FLUSHING;
+      goto pause;
+    }
+    wait_for_data = FALSE;
+  }
+
+  /* Grab/Recalculate current global output position
+   * This is the minimum pending output position of all tracks used for output
+   *
+   * If there is a track which is empty and not EOS, wait for it to receive data
+   * then recalculate global output position.
+   *
+   * This also pushes downstream all non-timed data that might be present.
+   *
+   * If all tracks are EOS: stop the task
+   */
+  GST_LOG_OBJECT (demux, "Calculating global output position of output slots");
+  for (tmp = demux->priv->outputs; tmp; tmp = tmp->next) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+    GstAdaptiveDemuxTrack *track;
+
+    /* If there is a pending track, Check if it's time to switch to it */
+    if (slot->pending_track)
+      handle_slot_pending_track_switch_locked (demux, slot);
+
+    track = slot->track;
+
+    if (!track->active) {
+      /* Note: Edward: I can't see in what cases we would end up with inactive
+         tracks assigned to slots. */
+      GST_ERROR_OBJECT (demux, "FIXME : Handle track switching");
+      g_assert (track->active);
+      continue;
+    }
+
+    if (track->next_position == GST_CLOCK_STIME_NONE) {
+      gst_adaptive_demux_track_update_next_position (track);
+    }
+
+    if (track->next_position != GST_CLOCK_STIME_NONE) {
+      if (global_output_position == GST_CLOCK_STIME_NONE)
+        global_output_position = track->next_position;
+      else
+        global_output_position =
+            MIN (global_output_position, track->next_position);
+      track->waiting_add = FALSE;
+    } else if (!track->eos) {
+      GST_DEBUG_OBJECT (demux, "Need timed data on track %s", track->stream_id);
+      wait_for_data = track->waiting_add = TRUE;
+    } else {
+      GST_DEBUG_OBJECT (demux, "Track %s is EOS, not waiting for timed data",
+          track->stream_id);
+    }
+  }
+
+  if (wait_for_data)
+    goto restart;
+
+  if (global_output_position == GST_CLOCK_STIME_NONE
+      && demux->output_period->closed) {
+    GST_DEBUG_OBJECT (demux, "Period %d is drained, switching to next period",
+        demux->output_period->period_num);
+    if (!gst_adaptive_demux_advance_output_period (demux)) {
+      /* Failed to move to next period, error out */
+      ret = GST_FLOW_ERROR;
+      goto pause;
+    }
+    /* Restart the loop */
+    goto restart;
+  }
+
+  GST_DEBUG_OBJECT (demux, "Outputting data for position %" GST_STIME_FORMAT,
+      GST_STIME_ARGS (global_output_position));
+
+  /* For each track:
+   *
+   * We know all active tracks have pending timed data
+   * * while track next_position <= global output position
+   *   * push pending data
+   *   * Update track next_position
+   *     * recalculate global output position
+   *   * Pop next pending data from track and update pending position
+   *
+   */
+  for (tmp = demux->priv->outputs; tmp; tmp = tmp->next) {
+    OutputSlot *slot = (OutputSlot *) tmp->data;
+    GstAdaptiveDemuxTrack *track = slot->track;
+
+    GST_LOG_OBJECT (track->element,
+        "active:%d draining:%d selected:%d next_position:%" GST_STIME_FORMAT
+        " global_output_position:%" GST_STIME_FORMAT, track->active,
+        track->draining, track->selected, GST_STIME_ARGS (track->next_position),
+        GST_STIME_ARGS (global_output_position));
+
+    if (!track->active)
+      continue;
+
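+    /* Dequeue and push from this track while the global output position is
+     * still unknown (flush out non-timed data), while this slot has not yet
+     * pushed any timed data, or while the track's next pending position is
+     * not beyond the global output position */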
+    while (global_output_position == GST_CLOCK_STIME_NONE
+        || !slot->pushed_timed_data
+        || ((track->next_position != GST_CLOCK_STIME_NONE)
+            && track->next_position <= global_output_position)) {
+      GstMiniObject *mo = track_dequeue_data_locked (demux, track, TRUE);
+
+      if (!mo) {
+        GST_DEBUG_OBJECT (demux,
+            "Track '%s' doesn't have any pending data (eos:%d pushed_timed_data:%d)",
+            track->stream_id, track->eos, slot->pushed_timed_data);
+        /* This should only happen if the track is EOS, or exactly in between
+         * the parser outputting segment/caps and the first buffers. */
+        g_assert (track->eos || !slot->pushed_timed_data);
+        break;
+      }
+
+      demux_update_buffering_locked (demux);
+      demux_post_buffering_locked (demux);
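+      /* Drop the tracks lock while pushing downstream; it is re-taken below
+       * once the buffer or event has been handled */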
+      TRACKS_UNLOCK (demux);
+
+      GST_DEBUG_OBJECT (demux, "Track '%s' dequeued %" GST_PTR_FORMAT,
+          track->stream_id, mo);
+
+      if (GST_IS_EVENT (mo)) {
+        GstEvent *event = (GstEvent *) mo;
+        if (GST_EVENT_TYPE (event) == GST_EVENT_GAP)
+          slot->pushed_timed_data = TRUE;
+        gst_pad_push_event (slot->pad, event);
+
+        if (GST_EVENT_IS_STICKY (event))
+          gst_event_store_mark_delivered (&track->sticky_events, event);
+      } else if (GST_IS_BUFFER (mo)) {
+        GstBuffer *buffer = (GstBuffer *) mo;
+
+        if (track->output_discont) {
+          if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
+            buffer = gst_buffer_make_writable (buffer);
+            GST_DEBUG_OBJECT (slot->pad,
+                "track %s marking discont %" GST_PTR_FORMAT, track->stream_id,
+                buffer);
+            GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+          }
+          track->output_discont = FALSE;
+        }
+        slot->flow_ret = gst_pad_push (slot->pad, buffer);
+        ret =
+            gst_flow_combiner_update_pad_flow (demux->priv->flowcombiner,
+            slot->pad, slot->flow_ret);
+        GST_DEBUG_OBJECT (slot->pad,
+            "track %s push returned %s (combined %s)", track->stream_id,
+            gst_flow_get_name (slot->flow_ret), gst_flow_get_name (ret));
+        slot->pushed_timed_data = TRUE;
+      } else {
+        GST_ERROR ("Unhandled miniobject %" GST_PTR_FORMAT, mo);
+      }
+
+      TRACKS_LOCK (demux);
+      gst_adaptive_demux_track_update_next_position (track);
+
+      if (ret != GST_FLOW_OK)
+        goto pause;
+    }
+  }
+
+  /* Store global output position */
+  if (global_output_position != GST_CLOCK_STIME_NONE)
+    demux->priv->global_output_position = global_output_position;
+
+  if (global_output_position == GST_CLOCK_STIME_NONE) {
+    if (!demux->priv->flushing) {
+      GST_DEBUG_OBJECT (demux,
+          "Pausing output task after reaching NONE global_output_position");
+      gst_task_pause (demux->priv->output_task);
+    }
+  }
+
+  TRACKS_UNLOCK (demux);
+  GST_DEBUG_OBJECT (demux, "leave");
+  return;
+
+pause:
+  {
+    GST_DEBUG_OBJECT (demux, "Pausing due to %s", gst_flow_get_name (ret));
+    /* If the flushing flag is set, then the task is being
+     * externally stopped, so don't go to pause(), otherwise we
+     * should so we don't keep spinning */
+    if (!demux->priv->flushing) {
+      GST_DEBUG_OBJECT (demux, "Pausing task due to %s",
+          gst_flow_get_name (ret));
+      gst_task_pause (demux->priv->output_task);
+    }
+
+    TRACKS_UNLOCK (demux);
+
+    if (ret == GST_FLOW_NOT_LINKED || ret <= GST_FLOW_EOS) {
+      GstEvent *eos = gst_event_new_eos ();
+
+      if (ret != GST_FLOW_EOS) {
+        GST_ELEMENT_FLOW_ERROR (demux, ret);
+      }
+
+      GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+      if (demux->priv->segment_seqnum != GST_SEQNUM_INVALID)
+        gst_event_set_seqnum (eos, demux->priv->segment_seqnum);
+      GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+      gst_adaptive_demux_push_src_event (demux, eos);
+    }
+
+    return;
+  }
+}
+
+/* must be called from the scheduler */
+gboolean
+gst_adaptive_demux_is_live (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->is_live)
+    return klass->is_live (demux);
+  return FALSE;
+}
+
+/* must be called from the scheduler */
+GstFlowReturn
+gst_adaptive_demux2_stream_seek (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, gboolean forward, GstSeekFlags flags,
+    GstClockTimeDiff ts, GstClockTimeDiff * final_ts)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->stream_seek)
+    return klass->stream_seek (stream, forward, flags, ts, final_ts);
+  return GST_FLOW_ERROR;
+}
+
+/* must be called from the scheduler */
+gboolean
+gst_adaptive_demux2_stream_has_next_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  gboolean ret = TRUE;
+
+  if (klass->stream_has_next_fragment)
+    ret = klass->stream_has_next_fragment (stream);
+
+  return ret;
+}
+
+/* must be called from the scheduler */
+/* Called from:
+ *  the ::finish_fragment() handlers when an *actual* fragment is done
+ *   */
+GstFlowReturn
+gst_adaptive_demux2_stream_advance_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstClockTime duration)
+{
+  if (stream->last_ret != GST_FLOW_OK)
+    return stream->last_ret;
+
+  stream->last_ret =
+      gst_adaptive_demux2_stream_advance_fragment_unlocked (demux, stream,
+      duration);
+
+  return stream->last_ret;
+}
+
+/* must be called with manifest_lock taken */
+static GstFlowReturn
+gst_adaptive_demux2_stream_advance_fragment_unlocked (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstClockTime duration)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstFlowReturn ret;
+
+  g_return_val_if_fail (klass->stream_advance_fragment != NULL, GST_FLOW_ERROR);
+
+  GST_LOG_OBJECT (stream,
+      "stream_time %" GST_STIME_FORMAT " duration:%" GST_TIME_FORMAT,
+      GST_STIME_ARGS (stream->fragment.stream_time), GST_TIME_ARGS (duration));
+
+  stream->download_error_count = 0;
+  g_clear_error (&stream->last_error);
+
+#if 0
+  /* FIXME - url has no indication of byte ranges for subsegments */
+  /* FIXME: Reenable statistics sending? */
+  gst_element_post_message (GST_ELEMENT_CAST (demux),
+      gst_message_new_element (GST_OBJECT_CAST (demux),
+          gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+              "manifest-uri", G_TYPE_STRING,
+              demux->manifest_uri, "uri", G_TYPE_STRING,
+              stream->fragment.uri, "fragment-start-time",
+              GST_TYPE_CLOCK_TIME, stream->download_start_time,
+              "fragment-stop-time", GST_TYPE_CLOCK_TIME,
+              gst_util_get_timestamp (), "fragment-size", G_TYPE_UINT64,
+              stream->download_total_bytes, "fragment-download-time",
+              GST_TYPE_CLOCK_TIME, stream->last_download_time, NULL)));
+#endif
+
+  /* Don't update to the end of the segment if in reverse playback */
+  GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+  if (GST_CLOCK_TIME_IS_VALID (duration) && demux->segment.rate > 0) {
+    stream->parse_segment.position += duration;
+    stream->current_position += duration;
+
+    GST_DEBUG_OBJECT (stream,
+        "stream position now %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (stream->current_position));
+  }
+  GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+
+  /* When advancing with a non 1.0 rate on live streams, we need to check
+   * the live seeking range again to make sure we can still advance to
+   * that position */
+  if (demux->segment.rate != 1.0 && gst_adaptive_demux_is_live (demux)) {
+    if (!gst_adaptive_demux2_stream_in_live_seek_range (demux, stream))
+      ret = GST_FLOW_EOS;
+    else
+      ret = klass->stream_advance_fragment (stream);
+  } else if (gst_adaptive_demux_is_live (demux)
+      || gst_adaptive_demux2_stream_has_next_fragment (demux, stream)) {
+    ret = klass->stream_advance_fragment (stream);
+  } else {
+    ret = GST_FLOW_EOS;
+  }
+
+  stream->download_start_time =
+      GST_TIME_AS_USECONDS (gst_adaptive_demux2_get_monotonic_time (demux));
+
+  if (ret == GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (stream, "checking if stream requires bitrate change");
+    if (gst_adaptive_demux2_stream_select_bitrate (demux, stream,
+            gst_adaptive_demux2_stream_update_current_bitrate (demux,
+                stream))) {
+      GST_DEBUG_OBJECT (stream, "Bitrate changed. Returning FLOW_SWITCH");
+      stream->need_header = TRUE;
+      ret = (GstFlowReturn) GST_ADAPTIVE_DEMUX_FLOW_SWITCH;
+    }
+  }
+
+  return ret;
+}
+
+/* must be called with manifest_lock taken */
+static gboolean
+gst_adaptive_demux2_stream_select_bitrate (GstAdaptiveDemux *
+    demux, GstAdaptiveDemux2Stream * stream, guint64 bitrate)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->stream_select_bitrate)
+    return klass->stream_select_bitrate (stream, bitrate);
+  return FALSE;
+}
+
+/* must be called with manifest_lock taken */
+GstFlowReturn
+gst_adaptive_demux2_stream_update_fragment_info (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstFlowReturn ret;
+
+  g_return_val_if_fail (klass->stream_update_fragment_info != NULL,
+      GST_FLOW_ERROR);
+
+  /* Make sure the sub-class will update bitrate, or else
+   * we will later */
+  stream->fragment.finished = FALSE;
+
+  GST_LOG_OBJECT (stream, "position %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (stream->current_position));
+
+  ret = klass->stream_update_fragment_info (stream);
+
+  GST_LOG_OBJECT (stream, "ret:%s uri:%s",
+      gst_flow_get_name (ret), stream->fragment.uri);
+  if (ret == GST_FLOW_OK) {
+    GST_LOG_OBJECT (stream,
+        "stream_time %" GST_STIME_FORMAT " duration:%" GST_TIME_FORMAT,
+        GST_STIME_ARGS (stream->fragment.stream_time),
+        GST_TIME_ARGS (stream->fragment.duration));
+    GST_LOG_OBJECT (stream,
+        "range start:%" G_GINT64_FORMAT " end:%" G_GINT64_FORMAT,
+        stream->fragment.range_start, stream->fragment.range_end);
+  }
+
+  return ret;
+}
+
+/* must be called with manifest_lock taken */
+GstClockTime
+gst_adaptive_demux2_stream_get_fragment_waiting_time (GstAdaptiveDemux *
+    demux, GstAdaptiveDemux2Stream * stream)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+
+  if (klass->stream_get_fragment_waiting_time)
+    return klass->stream_get_fragment_waiting_time (stream);
+  return 0;
+}
+
+static void
+handle_manifest_download_complete (DownloadRequest * request,
+    DownloadRequestState state, GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstBuffer *buffer;
+  GstFlowReturn result;
+
+  g_free (demux->manifest_base_uri);
+  g_free (demux->manifest_uri);
+
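+  /* A permanent redirect replaces the manifest URI itself; any other
+   * redirect is only stored as the base URI used to resolve relative URIs */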
+  if (request->redirect_permanent && request->redirect_uri) {
+    demux->manifest_uri = g_strdup (request->redirect_uri);
+    demux->manifest_base_uri = NULL;
+  } else {
+    demux->manifest_uri = g_strdup (request->uri);
+    demux->manifest_base_uri = g_strdup (request->redirect_uri);
+  }
+
+  buffer = download_request_take_buffer (request);
+
+  /* We should always have a buffer since this function is the non-error
+   * callback for the download */
+  g_assert (buffer);
+
+  result = klass->update_manifest_data (demux, buffer);
+  gst_buffer_unref (buffer);
+
+  /* FIXME: Should the manifest uri vars be reverted to original
+   * values if updating fails? */
+
+  if (result == GST_FLOW_OK) {
+    GstClockTime duration;
+    /* Send an updated duration message */
+    duration = klass->get_duration (demux);
+    if (duration != GST_CLOCK_TIME_NONE) {
+      GST_DEBUG_OBJECT (demux,
+          "Sending duration message : %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (duration));
+      gst_element_post_message (GST_ELEMENT (demux),
+          gst_message_new_duration_changed (GST_OBJECT (demux)));
+    } else {
+      GST_DEBUG_OBJECT (demux,
+          "Duration unknown, can not send the duration message");
+    }
+
+    /* If a manifest changes its liveness or whether it requires periodic
+     * updates, we need to start/stop the manifest update task appropriately */
+    /* Keep this condition in sync with the one in
+     * gst_adaptive_demux_start_manifest_update_task()
+     */
+    if (gst_adaptive_demux_is_live (demux) &&
+        klass->requires_periodical_playlist_update (demux)) {
+      gst_adaptive_demux_start_manifest_update_task (demux);
+    } else {
+      gst_adaptive_demux_stop_manifest_update_task (demux);
+    }
+  }
+}
+
+static void
+handle_manifest_download_failure (DownloadRequest * request,
+    DownloadRequestState state, GstAdaptiveDemux * demux)
+{
+  GST_FIXME_OBJECT (demux, "Manifest download failed.");
+  /* Retry or error out here */
+}
+
+static GstFlowReturn
+gst_adaptive_demux_update_manifest_default (GstAdaptiveDemux * demux)
+{
+  DownloadRequest *request;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GError *error = NULL;
+
+  request = download_request_new_uri (demux->manifest_uri);
+
+  download_request_set_callbacks (request,
+      (DownloadRequestEventCallback) handle_manifest_download_complete,
+      (DownloadRequestEventCallback) handle_manifest_download_failure,
+      NULL, NULL, demux);
+
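+  /* The request completes asynchronously; the callbacks registered above are
+   * invoked by the download helper once the download finishes or fails */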
+  if (!downloadhelper_submit_request (demux->download_helper, NULL,
+          DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH, request,
+          &error)) {
+    if (error) {
+      GST_ELEMENT_WARNING (demux, RESOURCE, FAILED,
+          ("Failed to download manifest: %s", error->message), (NULL));
+      g_clear_error (&error);
+    }
+    ret = GST_FLOW_NOT_LINKED;
+  }
+
+  return ret;
+}
+
+/* must be called with manifest_lock taken */
+GstFlowReturn
+gst_adaptive_demux_update_manifest (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstFlowReturn ret;
+
+  ret = klass->update_manifest (demux);
+
+  return ret;
+}
+
+void
+gst_adaptive_demux2_stream_fragment_clear (GstAdaptiveDemux2StreamFragment * f)
+{
+  g_free (f->uri);
+  f->uri = NULL;
+  f->range_start = 0;
+  f->range_end = -1;
+
+  g_free (f->header_uri);
+  f->header_uri = NULL;
+  f->header_range_start = 0;
+  f->header_range_end = -1;
+
+  g_free (f->index_uri);
+  f->index_uri = NULL;
+  f->index_range_start = 0;
+  f->index_range_end = -1;
+
+  f->stream_time = GST_CLOCK_STIME_NONE;
+  f->duration = GST_CLOCK_TIME_NONE;
+  f->finished = FALSE;
+}
+
+/* must be called with manifest_lock taken */
+gboolean
+gst_adaptive_demux_has_next_period (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  gboolean ret = FALSE;
+
+  if (klass->has_next_period)
+    ret = klass->has_next_period (demux);
+  GST_DEBUG_OBJECT (demux, "Has next period: %d", ret);
+  return ret;
+}
+
+/* must be called with manifest_lock taken */
+void
+gst_adaptive_demux_advance_period (GstAdaptiveDemux * demux)
+{
+  GstAdaptiveDemuxClass *klass = GST_ADAPTIVE_DEMUX_GET_CLASS (demux);
+  GstAdaptiveDemuxPeriod *previous_period = demux->input_period;
+
+  g_return_if_fail (klass->advance_period != NULL);
+
+  GST_DEBUG_OBJECT (demux, "Advancing to next period");
+  /* FIXME : no return value ? What if it fails ? */
+  klass->advance_period (demux);
+
+  if (previous_period == demux->input_period) {
+    GST_ERROR_OBJECT (demux, "Advancing period failed");
+    return;
+  }
+
+  /* Stop the previous period stream tasks */
+  gst_adaptive_demux_period_stop_tasks (previous_period);
+
+  gst_adaptive_demux_update_collection (demux, demux->input_period);
+  /* Figure out a pre-emptive selection based on the output period selection */
+  gst_adaptive_demux_period_transfer_selection (demux, demux->input_period,
+      demux->output_period);
+
+  gst_adaptive_demux_prepare_streams (demux, FALSE);
+  gst_adaptive_demux_start_tasks (demux);
+}
+
+/**
+ * gst_adaptive_demux2_get_monotonic_time:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Returns: a monotonically increasing time, using the system realtime clock
+ */
+GstClockTime
+gst_adaptive_demux2_get_monotonic_time (GstAdaptiveDemux * demux)
+{
+  g_return_val_if_fail (demux != NULL, GST_CLOCK_TIME_NONE);
+  return gst_adaptive_demux_clock_get_time (demux->realtime_clock);
+}
+
+/**
+ * gst_adaptive_demux2_get_client_now_utc:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Used to find the client's estimate of UTC, using the system realtime clock.
+ *
+ * Returns: the client's estimate of UTC
+ */
+GDateTime *
+gst_adaptive_demux2_get_client_now_utc (GstAdaptiveDemux * demux)
+{
+  return gst_adaptive_demux_clock_get_now_utc (demux->realtime_clock);
+}
+
+/**
+ * gst_adaptive_demux2_is_running:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Returns %FALSE if shutdown has started (transitioning down from
+ * PAUSED), otherwise %TRUE.
+ *
+ * Returns: whether the demuxer is processing data
+ */
+gboolean
+gst_adaptive_demux2_is_running (GstAdaptiveDemux * demux)
+{
+  return g_atomic_int_get (&demux->running);
+}
+
+/**
+ * gst_adaptive_demux2_get_qos_earliest_time:
+ * @demux: #GstAdaptiveDemux
+ *
+ * Returns: The QOS earliest time
+ *
+ * Since: 1.20
+ */
+GstClockTime
+gst_adaptive_demux2_get_qos_earliest_time (GstAdaptiveDemux * demux)
+{
+  GstClockTime earliest;
+
+  GST_OBJECT_LOCK (demux);
+  earliest = demux->priv->qos_earliest_time;
+  GST_OBJECT_UNLOCK (demux);
+
+  return earliest;
+}
+
+gboolean
+gst_adaptive_demux2_add_stream (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  g_return_val_if_fail (demux && stream, FALSE);
+
+  /* FIXME : Migrate to parent */
+  g_return_val_if_fail (stream->demux == NULL, FALSE);
+
+  GST_DEBUG_OBJECT (demux, "Adding stream %s", GST_OBJECT_NAME (stream));
+
+  TRACKS_LOCK (demux);
+  if (demux->input_period->prepared) {
+    GST_ERROR_OBJECT (demux,
+        "Attempted to add streams but no new period was created");
+    TRACKS_UNLOCK (demux);
+    return FALSE;
+  }
+  stream->demux = demux;
+  stream->period = demux->input_period;
+  demux->input_period->streams =
+      g_list_append (demux->input_period->streams, stream);
+
+  if (stream->tracks) {
+    GList *iter;
+    for (iter = stream->tracks; iter; iter = iter->next)
+      if (!gst_adaptive_demux_period_add_track (demux->input_period,
+              (GstAdaptiveDemuxTrack *) iter->data)) {
+        GST_ERROR_OBJECT (demux, "Failed to add track elements");
+        TRACKS_UNLOCK (demux);
+        return FALSE;
+      }
+  }
+  TRACKS_UNLOCK (demux);
+  return TRUE;
+}
+
+/* Return the current playback rate including any instant rate multiplier */
+gdouble
+gst_adaptive_demux_play_rate (GstAdaptiveDemux * demux)
+{
+  gdouble rate;
+  GST_ADAPTIVE_DEMUX_SEGMENT_LOCK (demux);
+  rate = demux->segment.rate * demux->instant_rate_multiplier;
+  GST_ADAPTIVE_DEMUX_SEGMENT_UNLOCK (demux);
+  return rate;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemux.h
new file mode 100644 (file)
index 0000000..ceeaa22
--- /dev/null
@@ -0,0 +1,786 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_ADAPTIVE_DEMUX_H_
+#define _GST_ADAPTIVE_DEMUX_H_
+
+#include <gst/gst.h>
+#include <gst/base/gstqueuearray.h>
+#include <gst/app/gstappsrc.h>
+#include "downloadhelper.h"
+#include "downloadrequest.h"
+
+#include "gstadaptivedemuxutils.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ADAPTIVE_DEMUX \
+  (gst_adaptive_demux_ng_get_type())
+#define GST_ADAPTIVE_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemux))
+#define GST_ADAPTIVE_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemuxClass))
+#define GST_ADAPTIVE_DEMUX_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_ADAPTIVE_DEMUX,GstAdaptiveDemuxClass))
+#define GST_IS_ADAPTIVE_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ADAPTIVE_DEMUX))
+#define GST_IS_ADAPTIVE_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ADAPTIVE_DEMUX))
+#define GST_ADAPTIVE_DEMUX_CAST(obj) ((GstAdaptiveDemux *)obj)
+
+#define GST_TYPE_ADAPTIVE_DEMUX2_STREAM \
+  (gst_adaptive_demux2_stream_get_type())
+#define GST_ADAPTIVE_DEMUX2_STREAM(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ADAPTIVE_DEMUX2_STREAM,GstAdaptiveDemux2Stream))
+#define GST_ADAPTIVE_DEMUX2_STREAM_CAST(obj) ((GstAdaptiveDemux2Stream *)obj)
+
+typedef struct _GstAdaptiveDemux2Stream GstAdaptiveDemux2Stream;
+typedef GstObjectClass GstAdaptiveDemux2StreamClass;
+
+
+/**
+ * GST_ADAPTIVE_DEMUX_SINK_NAME:
+ *
+ * The name of the template for the sink pad.
+ */
+#define GST_ADAPTIVE_DEMUX_SINK_NAME    "sink"
+
+/**
+ * GST_ADAPTIVE_DEMUX_SINK_PAD:
+ * @obj: a #GstAdaptiveDemux
+ *
+ * Gives the pointer to the sink #GstPad object of the element.
+ */
+#define GST_ADAPTIVE_DEMUX_SINK_PAD(obj)        (((GstAdaptiveDemux *) (obj))->sinkpad)
+
+#define GST_ADAPTIVE_DEMUX_IN_TRICKMODE_KEY_UNITS(obj) ((((GstAdaptiveDemux*)(obj))->segment.flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) == GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)
+
+#define GST_ADAPTIVE_DEMUX2_STREAM_NEED_HEADER(obj) (((GstAdaptiveDemux2Stream *) (obj))->need_header)
+
+/**
+ * GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME:
+ *
+ * Name of the ELEMENT type messages posted by dashdemux with statistics.
+ *
+ * Since: 1.6
+ */
+#define GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME "adaptive-streaming-statistics"
+
+#define GST_ELEMENT_ERROR_FROM_ERROR(el, msg, err) G_STMT_START { \
+  gchar *__dbg = g_strdup_printf ("%s: %s", msg, err->message);         \
+  GST_WARNING_OBJECT (el, "error: %s", __dbg);                          \
+  gst_element_message_full (GST_ELEMENT(el), GST_MESSAGE_ERROR,         \
+    err->domain, err->code,                                             \
+    NULL, __dbg, __FILE__, GST_FUNCTION, __LINE__);                     \
+  g_clear_error (&err); \
+} G_STMT_END
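+
+/* Illustrative usage (hypothetical call site): posts an ERROR message built
+ * from a GError and clears the error, e.g.
+ *
+ *   if (!parse_playlist (demux, data, &err))
+ *     GST_ELEMENT_ERROR_FROM_ERROR (demux, "Could not parse playlist", err);
+ */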
+
+/* DEPRECATED */
+#define GST_ADAPTIVE_DEMUX_FLOW_END_OF_FRAGMENT GST_FLOW_CUSTOM_SUCCESS_1
+
+/* Current fragment download should be aborted and restarted. The parent class
+ * will call ::update_fragment_info() again to get the updated information.
+ */
+#define GST_ADAPTIVE_DEMUX_FLOW_RESTART_FRAGMENT GST_FLOW_CUSTOM_SUCCESS_2
+
+typedef enum _GstAdaptiveDemux2StreamState GstAdaptiveDemux2StreamState;
+
+typedef struct _GstAdaptiveDemux2StreamFragment GstAdaptiveDemux2StreamFragment;
+typedef struct _GstAdaptiveDemuxTrack GstAdaptiveDemuxTrack;
+typedef struct _GstAdaptiveDemuxPeriod GstAdaptiveDemuxPeriod;
+typedef struct _GstAdaptiveDemux GstAdaptiveDemux;
+typedef struct _GstAdaptiveDemuxClass GstAdaptiveDemuxClass;
+typedef struct _GstAdaptiveDemuxPrivate GstAdaptiveDemuxPrivate;
+
+struct _GstAdaptiveDemux2StreamFragment
+{
+  /* The period-local stream time for the given fragment. */
+  GstClockTimeDiff stream_time;
+  GstClockTime duration;
+
+  gchar *uri;
+  gint64 range_start;
+  gint64 range_end;
+
+  /* when chunked downloading is used, this may be updated by need_another_chunk() */
+  gint chunk_size;
+
+  /* when headers are needed */
+  gchar *header_uri;
+  gint64 header_range_start;
+  gint64 header_range_end;
+
+  /* when index is needed */
+  gchar *index_uri;
+  gint64 index_range_start;
+  gint64 index_range_end;
+
+  gboolean finished;
+};
+
+struct _GstAdaptiveDemuxTrack
+{
+  gint ref_count;
+
+  /* Demux */
+  GstAdaptiveDemux *demux;
+
+  /* Stream type */
+  GstStreamType type;
+
+  /* Stream flags */
+  GstStreamFlags flags;
+
+  /* Unique identifier */
+  gchar *stream_id;
+
+  /* Unique identifier of the internal stream produced
+   * by parsebin for the Stream this track comes from */
+  gchar *upstream_stream_id;
+
+  /* Generic *elementary stream* caps */
+  GstCaps *generic_caps;
+
+  /* Generic metadata */
+  GstTagList *tags;
+
+  /* The stream object */
+  GstStream *stream_object;
+
+  /* If TRUE, this track should be filled */
+  gboolean selected;
+
+  /* If TRUE, this track is currently being outputted */
+  gboolean active;
+
+  /* If TRUE, it is no longer selected but still being outputted. */
+  gboolean draining;
+
+  /* FIXME : Replace by actual track element */
+  GstElement *element;
+
+  /* The level at which 100% buffering is achieved */
+  GstClockTime buffering_threshold;
+
+  /* The sinkpad receives parsed elementary stream */
+  GstPad *sinkpad;
+
+  /* The pending parsebin source pad (used in case streams from parsebin get updated) (ref taken) */
+  GstPad *pending_srcpad;
+
+  /* Data storage */
+  GstQueueArray *queue;
+
+  /* Sticky event storage for this track */
+  GstEventStore sticky_events;
+
+  /* ============== */
+  /* Input tracking */
+
+  /* The track received EOS */
+  gboolean eos;
+
+  /* Level to wait until download can commence */
+  GstClockTime waiting_del_level;
+
+  /* Input segment and time (in running time) */
+  GstSegment input_segment;
+  GstClockTimeDiff input_time;
+  guint64 input_segment_seqnum;
+
+  /* ================= */
+  /* Contents tracking */
+
+  /* Current level of queue in bytes and time */
+  guint64 level_bytes;
+  GstClockTime level_time;
+
+  /* =============== */
+  /* Output tracking */
+
+  /* Is the output thread waiting for data on this track ? */
+  gboolean waiting_add;
+
+  /* If TRUE, the next pending GstSegment running time should be updated to the
+   * time stored in update_next_segment_run_ts */
+  gboolean update_next_segment;
+
+  /* Output segment and time (in running time) */
+  GstSegment output_segment;
+  GstClockTimeDiff output_time;
+
+  /* Track position and duration for emitting gap
+   * events */
+  GstClockTime gap_position;
+  GstClockTime gap_duration;
+
+  /* Next running time position pending in queue */
+  GstClockTimeDiff next_position;
+
+  /* If the next output buffer should be marked discont */
+  gboolean output_discont;
+};
+
+enum _GstAdaptiveDemux2StreamState {
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_STOPPED, /* Stream was stopped */
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_RESTART, /* Stream stopped but needs restart logic */
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_START_FRAGMENT,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_LIVE,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_OUTPUT_SPACE,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_WAITING_MANIFEST_UPDATE,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_DOWNLOADING,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_EOS,
+  GST_ADAPTIVE_DEMUX2_STREAM_STATE_ERRORED
+};
+
+struct _GstAdaptiveDemux2Stream
+{
+  GstObject object;
+
+  /* FIXME : transition to gstobject->parent */
+  GstAdaptiveDemux *demux;
+
+  /* The period to which the stream belongs, set when adding the stream to the
+   * demuxer */
+  GstAdaptiveDemuxPeriod *period;
+
+  /* The tracks this stream targets */
+  GList *tracks;
+
+  /* The internal parsebin, forward data to track */
+  GstElement *parsebin;
+  GstPad *parsebin_sink;
+
+  gulong pad_added_id, pad_removed_id;
+
+  GstSegment parse_segment;
+
+  /* TRUE if the current stream GstSegment should be sent downstream */
+  gboolean send_segment;
+  /* TRUE if the stream GstSegment requires recalculation (from demuxer
+     segment) */
+  gboolean compute_segment;
+  /* first_and_live applies to compute_segment */
+  gboolean first_and_live;
+
+  /* When restarting, what is the target position (in demux segment) to
+   * begin at */
+  GstClockTime start_position;
+
+  /* Track the current position (in demux segment) of the current fragment */
+  GstClockTime current_position;
+
+  GstCaps *pending_caps;
+  GstTagList *pending_tags;
+
+  GList *pending_events;
+
+  GstFlowReturn last_ret;
+  GError *last_error;
+
+  gboolean discont;
+
+  /* download tooling */
+  gboolean need_header;
+  gboolean need_index;
+
+  gboolean downloading_header;
+  gboolean downloading_index;
+
+  /* persistent, reused download request for fragment data */
+  DownloadRequest *download_request;
+
+  GstAdaptiveDemux2StreamState state;
+  guint pending_cb_id;
+  gboolean download_active;
+
+  guint last_status_code;
+
+  gboolean pending_tracks; /* if we need to discover tracks dynamically for this stream */
+  gboolean download_finished;
+  gboolean cancelled;
+  gboolean replaced; /* replaced in a bitrate switch (used with cancelled) */
+
+  gboolean starting_fragment;
+  gboolean first_fragment_buffer;
+  gint64 download_start_time;
+  gint64 download_total_bytes;
+  gint64 download_end_offset;
+  guint64 current_download_rate;
+
+  /* bitrate of the previous fragment (pre-queue2) */
+  guint64 last_bitrate;
+
+  /* Total last download time, from request to completion */
+  GstClockTime last_download_time;
+
+  /* Average for the last fragments */
+  guint64 moving_bitrate;
+  guint moving_index;
+  guint64 *fragment_bitrates;
+
+  GstAdaptiveDemux2StreamFragment fragment;
+
+  guint download_error_count;
+
+  /* Last collection provided by parsebin */
+  GstStreamCollection *stream_collection;
+
+  /* OR'd set of stream types in this stream */
+  GstStreamType stream_type;
+};
+
+/**
+ * GstAdaptiveDemuxPeriod:
+ *
+ * The opaque #GstAdaptiveDemuxPeriod data structure.
+ */
+struct _GstAdaptiveDemuxPeriod
+{
+  gint ref_count;
+
+  GstAdaptiveDemux *demux;
+
+  /* TRUE if the streams of this period were prepared and can be started */
+  gboolean prepared;
+
+  /* TRUE if the period no longer receives any data (i.e. it is closed) */
+  gboolean closed;
+
+  /* An increasing unique identifier for the period.
+   *
+   * Note: unrelated to dash period id (which can be identical across
+   * periods) */
+  guint period_num;
+
+  /* The list of GstAdaptiveDemux2Stream (ref hold) */
+  GList *streams;
+
+  /* Current collection */
+  GstStreamCollection *collection;
+
+  /* List of available GstAdaptiveDemuxTrack (ref hold) */
+  GList *tracks;
+
+  /* Whether tracks were changed and need re-matching against outputs */
+  gboolean tracks_changed;
+};
+
+/**
+ * GstAdaptiveDemux:
+ *
+ * The opaque #GstAdaptiveDemux data structure.
+ */
+struct _GstAdaptiveDemux
+{
+  /*< private >*/
+  GstBin     bin;
+
+  gint running;
+
+  /*< protected >*/
+  GstPad         *sinkpad;
+
+  DownloadHelper *download_helper;
+
+  /* Protected by TRACKS_LOCK */
+  /* Period used for output */
+  GstAdaptiveDemuxPeriod *output_period;
+
+  /* Period used for input */
+  GstAdaptiveDemuxPeriod *input_period;
+
+  GstSegment segment;
+  gdouble instant_rate_multiplier; /* 1.0 by default, or from instant-rate seek */
+
+  gchar *manifest_uri;
+  gchar *manifest_base_uri;
+
+  /* Properties */
+  gfloat bandwidth_target_ratio; /* ratio of the available bitrate to use */
+  guint connection_speed; /* Available / bandwidth to use set by the application */
+  guint min_bitrate; /* Minimum bitrate to choose */
+  guint max_bitrate; /* Maximum bitrate to choose */
+
+  guint current_download_rate; /* Current estimate of download bitrate */
+
+  /* Buffering levels */
+  GstClockTime max_buffering_time;
+  GstClockTime buffering_high_watermark_time;
+  GstClockTime buffering_low_watermark_time;
+  gdouble buffering_high_watermark_fragments;
+  gdouble buffering_low_watermark_fragments;
+
+  /* video/audio buffer level as minimum of the appropriate streams */
+  GstClockTime current_level_time_video;
+  GstClockTime current_level_time_audio;
+
+  gboolean have_group_id;
+  guint group_id;
+
+  guint next_stream_id;
+
+  /* Realtime clock */
+  GstAdaptiveDemuxClock *realtime_clock;
+
+  /* < private > */
+  GstAdaptiveDemuxPrivate *priv;
+};
+
+/**
+ * GstAdaptiveDemuxClass:
+ *
+ */
+struct _GstAdaptiveDemuxClass
+{
+  /*< private >*/
+  GstBinClass bin_class;
+
+  /*< public >*/
+
+  /**
+   * process_manifest: Parse the manifest
+   * @demux: #GstAdaptiveDemux
+   * @manifest: the manifest to be parsed
+   *
+   * Parse the manifest and add the created streams using
+   * gst_adaptive_demux2_stream_new()
+   *
+   * Returns: %TRUE if successful
+   */
+  gboolean      (*process_manifest) (GstAdaptiveDemux * demux, GstBuffer * manifest);
+
+  /**
+   * get_manifest_update_interval:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Used during live streaming, the subclass should return the interval
+   * between successive manifest updates
+   *
+   * Returns: the update interval in microseconds
+   */
+  gint64        (*get_manifest_update_interval) (GstAdaptiveDemux * demux);
+
+  /**
+   * update_manifest:
+   * @demux: #GstAdaptiveDemux
+   *
+   * During live streaming, this will be called for the subclass to update its
+   * manifest with the new version. By default it fetches the manifest URI
+   * and passes it to GstAdaptiveDemux::update_manifest_data().
+   *
+   * Returns: #GST_FLOW_OK if all succeeded, #GST_FLOW_EOS if the stream ended
+   *          or #GST_FLOW_ERROR if an error happened
+   */
+  GstFlowReturn (*update_manifest) (GstAdaptiveDemux * demux);
+
+  /**
+   * update_manifest_data:
+   * @demux: #GstAdaptiveDemux
+   * @buf: Downloaded manifest data
+   *
+   * During live streaming, this will be called for the subclass to update its
+   * manifest with the new version
+   *
+   * Returns: #GST_FLOW_OK if all succeeded, #GST_FLOW_EOS if the stream ended
+   *          or #GST_FLOW_ERROR if an error happened
+   */
+  GstFlowReturn (*update_manifest_data) (GstAdaptiveDemux * demux, GstBuffer * buf);
+
+  gboolean      (*is_live)          (GstAdaptiveDemux * demux);
+  GstClockTime  (*get_duration)     (GstAdaptiveDemux * demux);
+
+  /**
+   * reset:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Reset the internal state of the subclass, getting ready to restart with
+   * a new stream afterwards
+   */
+  void          (*reset)            (GstAdaptiveDemux * demux);
+
+  /**
+   * seek:
+   * @demux: #GstAdaptiveDemux
+   * @seek: a seek #GstEvent
+   *
+   * The demuxer should seek on all its streams to the specified position
+   * in the seek event
+   *
+   * Returns: %TRUE if successful
+   */
+  gboolean      (*seek)             (GstAdaptiveDemux * demux, GstEvent * seek);
+
+  /**
+   * has_next_period:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Checks if there is a next period following the current one.
+   * DASH can have multiple periods chained in its manifest; when one finishes,
+   * this function is called to check whether there is a new period to be
+   * played in sequence.
+   *
+   * Returns: %TRUE if there is another period
+   */
+  gboolean      (*has_next_period)  (GstAdaptiveDemux * demux);
+  /**
+   * advance_period:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Advances the manifest to the next period. New streams should be created
+   * using gst_adaptive_demux2_stream_new().
+   */
+  void          (*advance_period)  (GstAdaptiveDemux * demux);
+
+  GstFlowReturn (*stream_seek)     (GstAdaptiveDemux2Stream * stream,
+                                   gboolean                 forward,
+                                   GstSeekFlags             flags,
+                                   GstClockTimeDiff         target_ts,
+                                   GstClockTimeDiff       * final_ts);
+  gboolean      (*stream_has_next_fragment)  (GstAdaptiveDemux2Stream * stream);
+  GstFlowReturn (*stream_advance_fragment) (GstAdaptiveDemux2Stream * stream);
+
+  /**
+   * stream_can_start:
+   * @demux: The #GstAdaptiveDemux
+   * @stream: a #GstAdaptiveDemux2Stream
+   *
+   * Called before starting a @stream. Sub-classes can return %FALSE if more
+   * information is required before it can be started. Sub-classes will have to
+   * call gst_adaptive_demux2_stream_start() when the stream should be started.
+   */
+  gboolean      (*stream_can_start) (GstAdaptiveDemux *demux,
+                                    GstAdaptiveDemux2Stream *stream);
+
+  /**
+   * stream_update_tracks:
+   * @demux: The #GstAdaptiveDemux
+   * @stream: A #GstAdaptiveDemux2Stream
+   *
+   * Called whenever the base class has collected a stream collection on a
+   * @stream which has pending tracks to be created. Subclasses should override
+   * this if they create streams without tracks. They should:
+   *
+   * * Create the various tracks by analyzing the @stream stream_collection
+   * * Set each track's upstream_stream_id to the corresponding stream_id from the collection
+   */
+  void          (*stream_update_tracks) (GstAdaptiveDemux *demux,
+                                        GstAdaptiveDemux2Stream *stream);
+  /**
+   * need_another_chunk:
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * If chunked downloading is used (chunk_size != 0) this is called once a
+   * chunk is finished to decide whether more has to be downloaded or not.
+   * May update chunk_size to a different value
+   */
+  gboolean      (*need_another_chunk) (GstAdaptiveDemux2Stream * stream);
+
+  /**
+   * stream_update_fragment_info:
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * Requests the stream to fill in the information about the current fragment
+   * in its current fragment struct
+   *
+   * Returns: #GST_FLOW_OK on success, #GST_FLOW_ERROR on error and #GST_FLOW_EOS
+   *          if there is no fragment.
+   */
+  GstFlowReturn (*stream_update_fragment_info) (GstAdaptiveDemux2Stream * stream);
+  /**
+   * stream_select_bitrate:
+   * @stream: #GstAdaptiveDemux2Stream
+   * @bitrate: the bitrate to select (in bytes per second)
+   *
+   * The stream should try to select the highest bitrate that is not greater
+   * than the requested bitrate. If it needs a codec change it should
+   * create the new stream using gst_adaptive_demux2_stream_new(). If it only
+   * needs a caps change it should set the new caps using
+   * gst_adaptive_demux2_stream_set_caps().
+   *
+   * Returns: %TRUE if the stream changed bitrate, %FALSE otherwise
+   */
+  gboolean      (*stream_select_bitrate) (GstAdaptiveDemux2Stream * stream, guint64 bitrate);
+  /**
+   * stream_get_fragment_waiting_time:
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * For live streams, requests how much time should be waited before starting
+   * to download the fragment. This is useful to avoid downloading a fragment that
+   * isn't available yet.
+   *
+   * Returns: The waiting time as a #GstClockTime
+   */
+  GstClockTime (*stream_get_fragment_waiting_time) (GstAdaptiveDemux2Stream * stream);
+
+  /**
+   * start_fragment:
+   * @demux: #GstAdaptiveDemux
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * Notifies the subclass that the given stream is starting the download
+   * of a new fragment. Can be used to reset/init internal state that is
+   * needed before each fragment, like decryption engines.
+   *
+   * Returns: %TRUE if successful.
+   */
+  gboolean      (*start_fragment) (GstAdaptiveDemux * demux, GstAdaptiveDemux2Stream * stream);
+  /**
+   * finish_fragment:
+   * @demux: #GstAdaptiveDemux
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * Notifies the subclass that a fragment download was finished.
+   * It can be used to cleanup internal state after a fragment and
+   * also push any pending data before moving to the next fragment.
+   */
+  GstFlowReturn (*finish_fragment) (GstAdaptiveDemux * demux, GstAdaptiveDemux2Stream * stream);
+  /**
+   * data_received:
+   * @demux: #GstAdaptiveDemux
+   * @stream: #GstAdaptiveDemux2Stream
+   * @buffer: #GstBuffer
+   *
+   * Notifies the subclass that a fragment chunk was downloaded. The subclass
+   * can look at the data and modify/push data as desired.
+   *
+   * Returns: #GST_FLOW_OK if successful, #GST_FLOW_ERROR in case of error.
+   */
+  GstFlowReturn (*data_received) (GstAdaptiveDemux * demux, GstAdaptiveDemux2Stream * stream, GstBuffer * buffer);
+
+  /**
+   * get_live_seek_range:
+   * @demux: #GstAdaptiveDemux
+   * @start: pointer to put the start position allowed to seek to
+   * @stop: pointer to put the stop position allowed to seek to
+   *
+   * Gets the allowed seek start and stop positions for the current live stream
+   *
+   * Returns: %TRUE if successful
+   */
+  gboolean (*get_live_seek_range) (GstAdaptiveDemux * demux, gint64 * start, gint64 * stop);
+
+  /**
+   * get_presentation_offset:
+   * @demux: #GstAdaptiveDemux
+   * @stream: #GstAdaptiveDemux2Stream
+   *
+   * Gets the delay to apply to @stream.
+   *
+   * Returns: a #GstClockTime representing the (positive) time offset to apply to
+   * @stream.
+   */
+  GstClockTime (*get_presentation_offset) (GstAdaptiveDemux *demux, GstAdaptiveDemux2Stream *stream);
+
+  /**
+   * get_period_start_time:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Gets the start time of the current period. Timestamps reset to 0 after
+   * each period, but we have to maintain a continuous stream and running time,
+   * so we need to know the start time of the current period.
+   *
+   * Returns: a #GstClockTime representing the start time of the currently
+   * selected period.
+   */
+  GstClockTime (*get_period_start_time) (GstAdaptiveDemux *demux);
+
+  /**
+   * requires_periodical_playlist_update:
+   * @demux: #GstAdaptiveDemux
+   *
+   * Some adaptive streaming protocols allow the client to download
+   * the playlist once and build up the fragment list based on the
+   * current fragment metadata. For those protocols the demuxer
+   * doesn't need to periodically refresh the playlist. This vfunc
+   * is relevant only for live playback scenarios.
+   *
+   * Returns: %TRUE if the playlist needs to be refreshed periodically by the demuxer.
+   */
+  gboolean (*requires_periodical_playlist_update) (GstAdaptiveDemux * demux);
+};
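+
+/* Illustrative subclass wiring (hypothetical "GstMyDemux2" subclass), showing
+ * how a subclass would typically install its vfuncs in class_init:
+ *
+ *   static void
+ *   gst_my_demux2_class_init (GstMyDemux2Class * klass)
+ *   {
+ *     GstAdaptiveDemuxClass *demux_class = (GstAdaptiveDemuxClass *) klass;
+ *
+ *     demux_class->process_manifest = gst_my_demux2_process_manifest;
+ *     demux_class->is_live = gst_my_demux2_is_live;
+ *     demux_class->stream_update_fragment_info = gst_my_demux2_update_fragment_info;
+ *     demux_class->stream_advance_fragment = gst_my_demux2_advance_fragment;
+ *   }
+ */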
+
+GType    gst_adaptive_demux_ng_get_type (void);
+
+GType    gst_adaptive_demux2_stream_get_type (void);
+
+gboolean gst_adaptive_demux2_add_stream (GstAdaptiveDemux *demux,
+                                        GstAdaptiveDemux2Stream *stream);
+
+gboolean gst_adaptive_demux2_stream_add_track (GstAdaptiveDemux2Stream *stream,
+                                              GstAdaptiveDemuxTrack *track);
+
+GstAdaptiveDemuxTrack *gst_adaptive_demux_track_new (GstAdaptiveDemux *demux,
+                                                    GstStreamType type,
+                                                    GstStreamFlags flags,
+                                                    gchar *stream_id,
+                                                    GstCaps *caps,
+                                                    GstTagList *tags);
+GstAdaptiveDemuxTrack *gst_adaptive_demux_track_ref (GstAdaptiveDemuxTrack *track);
+void                   gst_adaptive_demux_track_unref (GstAdaptiveDemuxTrack *track);
+
+
+void gst_adaptive_demux2_stream_set_caps (GstAdaptiveDemux2Stream * stream,
+                                         GstCaps * caps);
+
+void gst_adaptive_demux2_stream_set_tags (GstAdaptiveDemux2Stream * stream,
+                                         GstTagList * tags);
+
+void gst_adaptive_demux2_stream_fragment_clear (GstAdaptiveDemux2StreamFragment * f);
+
+GstFlowReturn gst_adaptive_demux2_stream_push_buffer (GstAdaptiveDemux2Stream * stream,
+                                                     GstBuffer * buffer);
+
+GstFlowReturn gst_adaptive_demux2_stream_advance_fragment (GstAdaptiveDemux * demux,
+                                                          GstAdaptiveDemux2Stream * stream,
+                                                          GstClockTime duration);
+
+gboolean gst_adaptive_demux_start_new_period (GstAdaptiveDemux * demux);
+
+void
+gst_adaptive_demux2_stream_start (GstAdaptiveDemux2Stream * stream);
+
+void gst_adaptive_demux2_stream_queue_event (GstAdaptiveDemux2Stream * stream,
+                                            GstEvent * event);
+
+gboolean gst_adaptive_demux2_stream_is_selected (GstAdaptiveDemux2Stream *stream);
+gboolean gst_adaptive_demux2_stream_is_running (GstAdaptiveDemux2Stream * stream);
+
+GstClockTime gst_adaptive_demux2_get_monotonic_time (GstAdaptiveDemux * demux);
+
+GDateTime *gst_adaptive_demux2_get_client_now_utc (GstAdaptiveDemux * demux);
+
+gboolean gst_adaptive_demux2_is_running (GstAdaptiveDemux * demux);
+
+GstClockTime gst_adaptive_demux2_get_qos_earliest_time (GstAdaptiveDemux *demux);
+
+GstCaps * gst_codec_utils_caps_from_iso_rfc6831 (gchar * codec);
+
+gdouble gst_adaptive_demux_play_rate (GstAdaptiveDemux *demux);
+
+G_END_DECLS
+
+#endif
+
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.c
new file mode 100644 (file)
index 0000000..c0cd9a3
--- /dev/null
@@ -0,0 +1,679 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+
+#include "gstadaptivedemuxutils.h"
+
+GST_DEBUG_CATEGORY_EXTERN (adaptivedemux2_debug);
+#define GST_CAT_DEFAULT adaptivedemux2_debug
+
+struct _GstAdaptiveDemuxClock
+{
+  gint ref_count;
+
+  GstClock *gst_clock;
+  GstClockTimeDiff clock_offset;        /* offset between realtime_clock and UTC */
+};
+
+struct _GstAdaptiveDemuxLoop
+{
+  gint ref_count;
+  GCond cond;
+  GMutex lock;
+
+  GRecMutex context_lock;
+
+  GThread *thread;
+  GMainLoop *loop;
+  GMainContext *context;
+
+  gboolean stopped;
+  gboolean paused;
+};
+
+GstAdaptiveDemuxClock *
+gst_adaptive_demux_clock_new (void)
+{
+  GstAdaptiveDemuxClock *clock = g_slice_new (GstAdaptiveDemuxClock);
+  GstClockType clock_type = GST_CLOCK_TYPE_OTHER;
+  GObjectClass *gobject_class;
+
+  g_atomic_int_set (&clock->ref_count, 1);
+
+  clock->gst_clock = gst_system_clock_obtain ();
+  g_assert (clock->gst_clock != NULL);
+
+  gobject_class = G_OBJECT_GET_CLASS (clock->gst_clock);
+  if (g_object_class_find_property (gobject_class, "clock-type")) {
+    g_object_get (clock->gst_clock, "clock-type", &clock_type, NULL);
+  } else {
+    GST_WARNING ("System clock does not have clock-type property");
+  }
+
+  if (clock_type == GST_CLOCK_TYPE_REALTIME) {
+    clock->clock_offset = 0;
+  } else {
+    GDateTime *utc_now;
+
+    utc_now = g_date_time_new_now_utc ();
+    gst_adaptive_demux_clock_set_utc_time (clock, utc_now);
+    g_date_time_unref (utc_now);
+  }
+
+  return clock;
+}
+
+GstAdaptiveDemuxClock *
+gst_adaptive_demux_clock_ref (GstAdaptiveDemuxClock * clock)
+{
+  g_return_val_if_fail (clock != NULL, NULL);
+  g_atomic_int_inc (&clock->ref_count);
+  return clock;
+}
+
+void
+gst_adaptive_demux_clock_unref (GstAdaptiveDemuxClock * clock)
+{
+  g_return_if_fail (clock != NULL);
+  if (g_atomic_int_dec_and_test (&clock->ref_count)) {
+    gst_object_unref (clock->gst_clock);
+    g_slice_free (GstAdaptiveDemuxClock, clock);
+  }
+}
+
+GstClockTime
+gst_adaptive_demux_clock_get_time (GstAdaptiveDemuxClock * clock)
+{
+  g_return_val_if_fail (clock != NULL, GST_CLOCK_TIME_NONE);
+  return gst_clock_get_time (clock->gst_clock);
+}
+
+GDateTime *
+gst_adaptive_demux_clock_get_now_utc (GstAdaptiveDemuxClock * clock)
+{
+  GstClockTime rtc_now;
+  GDateTime *unix_datetime;
+  GDateTime *result_datetime;
+  gint64 utc_now_in_us;
+
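+  /* UTC now = stored UTC offset + current clock time (in microseconds),
+   * split into whole seconds for the unix-epoch GDateTime plus the
+   * remaining microseconds added on top */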
+  rtc_now = gst_clock_get_time (clock->gst_clock);
+  utc_now_in_us = clock->clock_offset + GST_TIME_AS_USECONDS (rtc_now);
+  unix_datetime =
+      g_date_time_new_from_unix_utc (utc_now_in_us / G_TIME_SPAN_SECOND);
+  result_datetime =
+      g_date_time_add (unix_datetime, utc_now_in_us % G_TIME_SPAN_SECOND);
+  g_date_time_unref (unix_datetime);
+  return result_datetime;
+}
+
+void
+gst_adaptive_demux_clock_set_utc_time (GstAdaptiveDemuxClock * clock,
+    GDateTime * utc_now)
+{
+  GstClockTime rtc_now = gst_clock_get_time (clock->gst_clock);
+  GstClockTimeDiff clock_offset;
+
+  clock_offset =
+      g_date_time_to_unix (utc_now) * G_TIME_SPAN_SECOND +
+      g_date_time_get_microsecond (utc_now) - GST_TIME_AS_USECONDS (rtc_now);
+
+  GST_INFO ("Changing UTC clock offset to %" GST_STIME_FORMAT
+      " was %" GST_STIME_FORMAT, GST_STIME_ARGS (clock_offset),
+      GST_STIME_ARGS (clock->clock_offset));
+
+  clock->clock_offset = clock_offset;
+}
+
+GstAdaptiveDemuxLoop *
+gst_adaptive_demux_loop_new (void)
+{
+  GstAdaptiveDemuxLoop *loop = g_slice_new0 (GstAdaptiveDemuxLoop);
+  g_atomic_int_set (&loop->ref_count, 1);
+
+  g_mutex_init (&loop->lock);
+  g_rec_mutex_init (&loop->context_lock);
+  g_cond_init (&loop->cond);
+
+  loop->stopped = TRUE;
+  loop->paused = FALSE;
+
+  return loop;
+}
+
+GstAdaptiveDemuxLoop *
+gst_adaptive_demux_loop_ref (GstAdaptiveDemuxLoop * loop)
+{
+  g_return_val_if_fail (loop != NULL, NULL);
+  g_atomic_int_inc (&loop->ref_count);
+  return loop;
+}
+
+void
+gst_adaptive_demux_loop_unref (GstAdaptiveDemuxLoop * loop)
+{
+  g_return_if_fail (loop != NULL);
+  if (g_atomic_int_dec_and_test (&loop->ref_count)) {
+    gst_adaptive_demux_loop_stop (loop, TRUE);
+
+    g_mutex_clear (&loop->lock);
+    g_rec_mutex_clear (&loop->context_lock);
+    g_cond_clear (&loop->cond);
+
+    g_slice_free (GstAdaptiveDemuxLoop, loop);
+  }
+}
+
+static gpointer
+_gst_adaptive_demux_loop_thread (GstAdaptiveDemuxLoop * loop)
+{
+  g_mutex_lock (&loop->lock);
+
+  loop->loop = g_main_loop_new (loop->context, FALSE);
+
+  while (!loop->stopped) {
+    g_mutex_unlock (&loop->lock);
+
+    g_rec_mutex_lock (&loop->context_lock);
+
+    g_main_context_push_thread_default (loop->context);
+    g_main_loop_run (loop->loop);
+    g_main_context_pop_thread_default (loop->context);
+
+    g_rec_mutex_unlock (&loop->context_lock);
+
+    g_mutex_lock (&loop->lock);
+    while (loop->paused)
+      g_cond_wait (&loop->cond, &loop->lock);
+  }
+
+  g_main_loop_unref (loop->loop);
+  loop->loop = NULL;
+
+  g_cond_broadcast (&loop->cond);
+  g_mutex_unlock (&loop->lock);
+
+  g_main_context_unref (loop->context);
+  loop->context = NULL;
+
+  gst_adaptive_demux_loop_unref (loop);
+
+  return NULL;
+}
+
+void
+gst_adaptive_demux_loop_start (GstAdaptiveDemuxLoop * loop)
+{
+  g_mutex_lock (&loop->lock);
+  if (loop->thread != NULL && !loop->stopped)
+    goto done;                  /* Already running */
+
+  loop->stopped = FALSE;
+  loop->context = g_main_context_new ();
+
+  loop->thread =
+      g_thread_new ("AdaptiveDemux",
+      (GThreadFunc) _gst_adaptive_demux_loop_thread,
+      gst_adaptive_demux_loop_ref (loop));
+
+done:
+  g_mutex_unlock (&loop->lock);
+}
+
+static gboolean
+do_quit_cb (GstAdaptiveDemuxLoop * loop)
+{
+  g_main_loop_quit (loop->loop);
+  return G_SOURCE_REMOVE;
+}
+
+void
+gst_adaptive_demux_loop_stop (GstAdaptiveDemuxLoop * loop, gboolean wait)
+{
+  g_mutex_lock (&loop->lock);
+  loop->stopped = TRUE;
+
+  if (loop->loop != NULL) {
+    GSource *s = g_idle_source_new ();
+    g_source_set_callback (s, (GSourceFunc) do_quit_cb,
+        gst_adaptive_demux_loop_ref (loop),
+        (GDestroyNotify) gst_adaptive_demux_loop_unref);
+    g_source_attach (s, loop->context);
+    g_source_unref (s);
+
+    if (wait) {
+      while (loop->loop != NULL)
+        g_cond_wait (&loop->cond, &loop->lock);
+    }
+  }
+
+  g_mutex_unlock (&loop->lock);
+}
+
+gboolean
+gst_adaptive_demux_loop_pause_and_lock (GstAdaptiveDemuxLoop * loop)
+{
+  /* Try and acquire the context lock directly. This will succeed
+   * if called when the loop is not running, and we can avoid
+   * adding an unnecessary extra idle source to quit the loop. */
+  if (!g_rec_mutex_trylock (&loop->context_lock)) {
+    g_mutex_lock (&loop->lock);
+
+    if (loop->stopped) {
+      g_mutex_unlock (&loop->lock);
+      return FALSE;
+    }
+
+    loop->paused = TRUE;
+
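+    /* The loop is currently running: queue an idle source that quits the
+     * main loop so the loop thread releases the context lock and parks in
+     * its paused wait, then take the context lock ourselves below */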
+    {
+      GSource *s = g_idle_source_new ();
+      g_source_set_callback (s, (GSourceFunc) do_quit_cb,
+          gst_adaptive_demux_loop_ref (loop),
+          (GDestroyNotify) gst_adaptive_demux_loop_unref);
+      g_source_attach (s, loop->context);
+      g_source_unref (s);
+    }
+
+    g_mutex_unlock (&loop->lock);
+
+    g_rec_mutex_lock (&loop->context_lock);
+  }
+  g_main_context_push_thread_default (loop->context);
+
+  return TRUE;
+}
+
+gboolean
+gst_adaptive_demux_loop_unlock_and_unpause (GstAdaptiveDemuxLoop * loop)
+{
+  g_main_context_pop_thread_default (loop->context);
+  g_rec_mutex_unlock (&loop->context_lock);
+
+  g_mutex_lock (&loop->lock);
+  loop->paused = FALSE;
+
+  if (loop->stopped) {
+    g_mutex_unlock (&loop->lock);
+    return FALSE;
+  }
+
+  /* Wake up the loop to run again */
+  g_cond_broadcast (&loop->cond);
+  g_mutex_unlock (&loop->lock);
+
+  return TRUE;
+}
+
+guint
+gst_adaptive_demux_loop_call (GstAdaptiveDemuxLoop * loop, GSourceFunc func,
+    gpointer data, GDestroyNotify notify)
+{
+  guint ret = 0;
+
+  g_mutex_lock (&loop->lock);
+  if (loop->context) {
+    GSource *s = g_idle_source_new ();
+    g_source_set_callback (s, func, data, notify);
+    ret = g_source_attach (s, loop->context);
+    g_source_unref (s);
+  } else if (notify != NULL) {
+    notify (data);
+  }
+
+  g_mutex_unlock (&loop->lock);
+
+  return ret;
+}
+
+guint
+gst_adaptive_demux_loop_call_delayed (GstAdaptiveDemuxLoop * loop,
+    GstClockTime delay, GSourceFunc func, gpointer data, GDestroyNotify notify)
+{
+  guint ret = 0;
+
+  g_mutex_lock (&loop->lock);
+  if (loop->context) {
+    GSource *s = g_timeout_source_new (GST_TIME_AS_MSECONDS (delay));
+    g_source_set_callback (s, func, data, notify);
+    ret = g_source_attach (s, loop->context);
+    g_source_unref (s);
+  } else if (notify != NULL) {
+    notify (data);
+  }
+
+  g_mutex_unlock (&loop->lock);
+
+  return ret;
+}
+
+void
+gst_adaptive_demux_loop_cancel_call (GstAdaptiveDemuxLoop * loop, guint cb_id)
+{
+  GSource *s;
+
+  g_mutex_lock (&loop->lock);
+  s = g_main_context_find_source_by_id (loop->context, cb_id);
+  if (s)
+    g_source_destroy (s);
+  g_mutex_unlock (&loop->lock);
+}
+
+struct Rfc5322TimeZone
+{
+  const gchar *name;
+  gfloat tzoffset;
+};
+
+/* Parse an RFC5322 (section 3.3) date-time from the Date: field in the
+ * HTTP response.
+ * See https://tools.ietf.org/html/rfc5322#section-3.3
+ */
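+/* Example of an accepted value (illustrative):
+ *   "Fri, 11 Mar 2022 16:11:50 GMT" or "Fri, 11 Mar 2022 16:11:50 +0100" */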
+GstDateTime *
+gst_adaptive_demux_util_parse_http_head_date (const gchar * http_date)
+{
+  static const gchar *months[] = { NULL, "Jan", "Feb", "Mar", "Apr",
+    "May", "Jun", "Jul", "Aug",
+    "Sep", "Oct", "Nov", "Dec", NULL
+  };
+  static const struct Rfc5322TimeZone timezones[] = {
+    {"Z", 0},
+    {"UT", 0},
+    {"GMT", 0},
+    {"BST", 1},
+    {"EST", -5},
+    {"EDT", -4},
+    {"CST", -6},
+    {"CDT", -5},
+    {"MST", -7},
+    {"MDT", -6},
+    {"PST", -8},
+    {"PDT", -7},
+    {NULL, 0}
+  };
+  gint ret;
+  const gchar *pos;
+  gint year = -1, month = -1, day = -1, hour = -1, minute = -1, second = -1;
+  gchar zone[6];
+  gchar monthstr[4];
+  gfloat tzoffset = 0;
+  gboolean parsed_tz = FALSE;
+
+  g_return_val_if_fail (http_date != NULL, NULL);
+
+  /* skip optional text version of day of the week */
+  pos = strchr (http_date, ',');
+  if (pos)
+    pos++;
+  else
+    pos = http_date;
+
+  ret =
+      sscanf (pos, "%02d %3s %04d %02d:%02d:%02d %5s", &day, monthstr, &year,
+      &hour, &minute, &second, zone);
+
+  if (ret == 7) {
+    gchar *z = zone;
+    gint i;
+
+    for (i = 1; months[i]; ++i) {
+      if (g_ascii_strncasecmp (months[i], monthstr, strlen (months[i])) == 0) {
+        month = i;
+        break;
+      }
+    }
+    for (i = 0; timezones[i].name && !parsed_tz; ++i) {
+      if (g_ascii_strncasecmp (timezones[i].name, z,
+              strlen (timezones[i].name)) == 0) {
+        tzoffset = timezones[i].tzoffset;
+        parsed_tz = TRUE;
+      }
+    }
+    if (!parsed_tz) {
+      gint hh, mm;
+      gboolean neg = FALSE;
+      /* check if it is in the form +-HHMM */
+      if (*z == '+' || *z == '-') {
+        if (*z == '+')
+          ++z;
+        else if (*z == '-') {
+          ++z;
+          neg = TRUE;
+        }
+        ret = sscanf (z, "%02d%02d", &hh, &mm);
+        if (ret == 2) {
+          tzoffset = hh;
+          tzoffset += mm / 60.0;
+          if (neg)
+            tzoffset = -tzoffset;
+          parsed_tz = TRUE;
+        }
+      }
+    }
+    /* Accept the year in either 2-digit or 4-digit format */
+    if (year < 100)
+      year += 2000;
+  }
+
+  if (month < 1 || !parsed_tz)
+    return NULL;
+
+  return gst_date_time_new (tzoffset, year, month, day, hour, minute, second);
+}
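+
+/* Usage sketch (editor's illustration, not part of the original patch):
+ * parsing a typical HTTP "Date" header value with the helper above.
+ *
+ *   GstDateTime *dt =
+ *       gst_adaptive_demux_util_parse_http_head_date
+ *       ("Tue, 15 Nov 1994 08:12:31 GMT");
+ *   if (dt != NULL) {
+ *     // dt now holds 1994-11-15 08:12:31 with a UTC (+0) offset
+ *     gst_date_time_unref (dt);
+ *   }
+ */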
+
+typedef struct
+{
+  gboolean delivered;
+  GstEvent *event;
+} PadEvent;
+
+void
+gst_event_store_init (GstEventStore * store)
+{
+  store->events = g_array_sized_new (FALSE, TRUE, sizeof (PadEvent), 16);
+  store->events_pending = FALSE;
+}
+
+void
+gst_event_store_flush (GstEventStore * store)
+{
+  guint i, len;
+  GArray *events = store->events;
+
+  len = events->len;
+  for (i = 0; i < len; i++) {
+    PadEvent *ev = &g_array_index (events, PadEvent, i);
+    GstEvent *event = ev->event;
+
+    ev->event = NULL;
+
+    gst_event_unref (event);
+  }
+  g_array_set_size (events, 0);
+
+  store->events_pending = FALSE;
+}
+
+void
+gst_event_store_deinit (GstEventStore * store)
+{
+  gst_event_store_flush (store);
+  g_array_free (store->events, TRUE);
+}
+
+void
+gst_event_store_insert_event (GstEventStore * store, GstEvent * event,
+    gboolean delivered)
+{
+  guint i, len;
+  GstEventType type;
+  GArray *events;
+  GQuark name_id = 0;
+  gboolean insert = TRUE;
+
+  type = GST_EVENT_TYPE (event);
+
+  if (type & GST_EVENT_TYPE_STICKY_MULTI)
+    name_id = gst_structure_get_name_id (gst_event_get_structure (event));
+
+  events = store->events;
+
+  len = events->len;
+  for (i = 0; i < len; i++) {
+    PadEvent *ev = &g_array_index (events, PadEvent, i);
+
+    if (ev->event == NULL)
+      continue;
+
+    if (type == GST_EVENT_TYPE (ev->event)) {
+      /* matching types, check matching name if needed */
+      if (name_id && !gst_event_has_name_id (ev->event, name_id))
+        continue;
+
+      /* overwrite if different */
+      if (gst_event_replace (&ev->event, event)) {
+        ev->delivered = delivered;
+        /* If the event was not delivered, mark that we have a pending
+         * undelivered event */
+        if (!delivered)
+          store->events_pending = TRUE;
+      }
+
+      insert = FALSE;
+      break;
+    }
+
+    if (type < GST_EVENT_TYPE (ev->event) || (type != GST_EVENT_TYPE (ev->event)
+            && GST_EVENT_TYPE (ev->event) == GST_EVENT_EOS)) {
+      /* STREAM_START, CAPS and SEGMENT must be delivered in this order. By
+       * keeping the sticky events ordered we can check that this is respected. */
+      if (G_UNLIKELY (GST_EVENT_TYPE (ev->event) <= GST_EVENT_SEGMENT
+              || GST_EVENT_TYPE (ev->event) == GST_EVENT_EOS))
+        g_warning (G_STRLOC
+            ":%s:<store %p> Sticky event misordering, got '%s' before '%s'",
+            G_STRFUNC, store,
+            gst_event_type_get_name (GST_EVENT_TYPE (ev->event)),
+            gst_event_type_get_name (type));
+      break;
+    }
+  }
+  if (insert) {
+    PadEvent ev;
+    ev.event = gst_event_ref (event);
+    ev.delivered = delivered;
+    g_array_insert_val (events, i, ev);
+
+    /* If the event was not delivered, mark that we have a pending
+     * undelivered event */
+    if (!delivered)
+      store->events_pending = TRUE;
+    GST_LOG ("store %p stored sticky event %s", store,
+        GST_EVENT_TYPE_NAME (event));
+  }
+}
+
+/* Find the first pending (not yet delivered) event and return a ref to it, owned by the caller */
+GstEvent *
+gst_event_store_get_next_pending (GstEventStore * store)
+{
+  GArray *events;
+  guint i, len;
+
+  if (!store->events_pending)
+    return NULL;
+
+  events = store->events;
+  len = events->len;
+  for (i = 0; i < len; i++) {
+    PadEvent *ev = &g_array_index (events, PadEvent, i);
+
+    if (ev->event == NULL || ev->delivered)
+      continue;
+
+    /* Found an undelivered event, return it. The caller will mark it
+     * as delivered once it has done so successfully by calling
+     * gst_event_store_mark_delivered() */
+    return gst_event_ref (ev->event);
+  }
+
+  store->events_pending = FALSE;
+  return NULL;
+}
+
+void
+gst_event_store_mark_delivered (GstEventStore * store, GstEvent * event)
+{
+  gboolean events_pending = FALSE;
+  GArray *events;
+  guint i, len;
+
+  events = store->events;
+  len = events->len;
+  for (i = 0; i < len; i++) {
+    PadEvent *ev = &g_array_index (events, PadEvent, i);
+
+    if (ev->event == NULL)
+      continue;
+
+    /* Check if there are any pending events other than
+     * the passed one, so we can update the events_pending
+     * flag at the end */
+    if (ev->event != event && !ev->delivered) {
+      events_pending = TRUE;
+      continue;
+    }
+
+    ev->delivered = TRUE;
+  }
+
+  store->events_pending = events_pending;
+}
+
+void
+gst_event_store_mark_all_undelivered (GstEventStore * store)
+{
+  gboolean events_pending = FALSE;
+  GArray *events;
+  guint i, len;
+
+  events = store->events;
+  len = events->len;
+  for (i = 0; i < len; i++) {
+    PadEvent *ev = &g_array_index (events, PadEvent, i);
+
+    if (ev->event == NULL)
+      continue;
+
+    ev->delivered = FALSE;
+    events_pending = TRUE;
+  }
+
+  /* Only set the flag if there was at least
+   * one sticky event in the store */
+  store->events_pending = events_pending;
+}
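+
+/* Usage sketch (editor's illustration, not part of the original patch):
+ * the typical lifecycle of a GstEventStore. push_event_somewhere() and
+ * stream_start_event are placeholders for the caller's delivery function
+ * and sticky event.
+ *
+ *   GstEventStore store;
+ *   GstEvent *ev;
+ *
+ *   gst_event_store_init (&store);
+ *   gst_event_store_insert_event (&store, stream_start_event, FALSE);
+ *   while ((ev = gst_event_store_get_next_pending (&store))) {
+ *     gboolean ok = push_event_somewhere (ev);
+ *     if (ok)
+ *       gst_event_store_mark_delivered (&store, ev);
+ *     gst_event_unref (ev);
+ *     if (!ok)
+ *       break;
+ *   }
+ *   gst_event_store_deinit (&store);
+ */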
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstadaptivedemuxutils.h
new file mode 100644 (file)
index 0000000..de5a4cb
--- /dev/null
@@ -0,0 +1,75 @@
+/* GStreamer
+ *
+ * Copyright (C) 2014 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_ADAPTIVE_DEMUX_UTILS_H_
+#define _GST_ADAPTIVE_DEMUX_UTILS_H_
+
+#include <gst/gst.h>
+
+typedef struct _GstAdaptiveDemuxClock GstAdaptiveDemuxClock;
+
+typedef struct _GstAdaptiveDemuxLoop GstAdaptiveDemuxLoop;
+
+GstAdaptiveDemuxClock *gst_adaptive_demux_clock_new (void);
+GstAdaptiveDemuxClock *gst_adaptive_demux_clock_ref (GstAdaptiveDemuxClock *
+    clock);
+void gst_adaptive_demux_clock_unref (GstAdaptiveDemuxClock * clock);
+
+GstClockTime gst_adaptive_demux_clock_get_time (GstAdaptiveDemuxClock * clock);
+GDateTime *gst_adaptive_demux_clock_get_now_utc (GstAdaptiveDemuxClock * clock);
+void gst_adaptive_demux_clock_set_utc_time (GstAdaptiveDemuxClock * clock, GDateTime *utc_now);
+
+GstAdaptiveDemuxLoop *gst_adaptive_demux_loop_new (void);
+GstAdaptiveDemuxLoop *gst_adaptive_demux_loop_ref (GstAdaptiveDemuxLoop * loop);
+void gst_adaptive_demux_loop_unref (GstAdaptiveDemuxLoop * loop);
+
+void gst_adaptive_demux_loop_start (GstAdaptiveDemuxLoop *loop);
+void gst_adaptive_demux_loop_stop (GstAdaptiveDemuxLoop * loop, gboolean wait);
+
+guint gst_adaptive_demux_loop_call (GstAdaptiveDemuxLoop *loop, GSourceFunc func,
+    gpointer data, GDestroyNotify notify);
+guint gst_adaptive_demux_loop_call_delayed (GstAdaptiveDemuxLoop *loop, GstClockTime delay,
+    GSourceFunc func, gpointer data, GDestroyNotify notify);
+void gst_adaptive_demux_loop_cancel_call (GstAdaptiveDemuxLoop *loop, guint cb_id);
+
+gboolean gst_adaptive_demux_loop_pause_and_lock (GstAdaptiveDemuxLoop * loop);
+gboolean gst_adaptive_demux_loop_unlock_and_unpause (GstAdaptiveDemuxLoop * loop);
+
+GstDateTime *gst_adaptive_demux_util_parse_http_head_date (const gchar *http_date);
+
+typedef struct _GstEventStore GstEventStore;
+
+struct _GstEventStore {
+  GArray *events;
+  gboolean events_pending;
+};
+
+void gst_event_store_init(GstEventStore *store);
+void gst_event_store_deinit(GstEventStore *store);
+
+void gst_event_store_flush(GstEventStore *store);
+
+void gst_event_store_insert_event (GstEventStore *store, GstEvent * event, gboolean delivered);
+GstEvent *gst_event_store_get_next_pending (GstEventStore *store);
+void gst_event_store_mark_delivered (GstEventStore *store, GstEvent *event);
+void gst_event_store_mark_all_undelivered (GstEventStore *store);
+#endif
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.c
new file mode 100644 (file)
index 0000000..b119988
--- /dev/null
@@ -0,0 +1,979 @@
+/*
+ * ISO File Format parsing library
+ *
+ * gstisoff.h
+ *
+ * Copyright (C) 2015 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstisoff.h"
+#include <gst/base/gstbytereader.h>
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_isoff_debug);
+#define GST_CAT_DEFAULT gst_isoff_debug
+
+static gboolean initialized = FALSE;
+
+#define INITIALIZE_DEBUG_CATEGORY \
+  if (!initialized) { \
+    GST_DEBUG_CATEGORY_INIT (gst_isoff_debug, "isoff", 0, \
+      "ISO File Format parsing library"); \
+    initialized = TRUE; \
+  }
+
+static const guint8 tfrf_uuid[] = {
+  0xd4, 0x80, 0x7e, 0xf2, 0xca, 0x39, 0x46, 0x95,
+  0x8e, 0x54, 0x26, 0xcb, 0x9e, 0x46, 0xa7, 0x9f
+};
+
+static const guint8 tfxd_uuid[] = {
+  0x6d, 0x1d, 0x9b, 0x05, 0x42, 0xd5, 0x44, 0xe6,
+  0x80, 0xe2, 0x14, 0x1d, 0xaf, 0xf7, 0x57, 0xb2
+};
+
+/* gst_isoff_parse_box_header:
+ * @reader: a #GstByteReader positioned at the start of a box
+ * @type: type that was found at the current position
+ * @extended_type: (allow-none): extended type if type=='uuid'
+ * @header_size: (allow-none): size of the box header (type, extended type and size)
+ * @size: size of the complete box including type, extended type and size
+ *
+ * Advances the byte reader to the start of the box content. To skip
+ * over the complete box, skip size - header_size bytes.
+ *
+ * Returns: TRUE if a box header could be parsed, FALSE if more data is needed
+ */
+gboolean
+gst_isoff_parse_box_header (GstByteReader * reader, guint32 * type,
+    guint8 extended_type[16], guint * header_size, guint64 * size)
+{
+  guint header_start_offset;
+  guint32 size_field;
+
+  INITIALIZE_DEBUG_CATEGORY;
+  header_start_offset = gst_byte_reader_get_pos (reader);
+
+  if (gst_byte_reader_get_remaining (reader) < 8)
+    goto not_enough_data;
+
+  size_field = gst_byte_reader_get_uint32_be_unchecked (reader);
+  *type = gst_byte_reader_get_uint32_le_unchecked (reader);
+
+  if (size_field == 1) {
+    if (gst_byte_reader_get_remaining (reader) < 8)
+      goto not_enough_data;
+    *size = gst_byte_reader_get_uint64_be_unchecked (reader);
+  } else {
+    *size = size_field;
+  }
+
+  if (*type == GST_ISOFF_FOURCC_UUID) {
+    if (gst_byte_reader_get_remaining (reader) < 16)
+      goto not_enough_data;
+
+    if (extended_type)
+      memcpy (extended_type, gst_byte_reader_get_data_unchecked (reader, 16),
+          16);
+  }
+
+  if (header_size)
+    *header_size = gst_byte_reader_get_pos (reader) - header_start_offset;
+
+  return TRUE;
+
+not_enough_data:
+  gst_byte_reader_set_pos (reader, header_start_offset);
+  return FALSE;
+}
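+
+/* Usage sketch (editor's illustration, not part of the original patch):
+ * the iteration pattern this helper is designed for, as also used by the
+ * box parsers below.
+ *
+ *   guint32 fourcc;
+ *   guint header_size;
+ *   guint64 size;
+ *
+ *   while (gst_isoff_parse_box_header (reader, &fourcc, NULL, &header_size,
+ *           &size)) {
+ *     if (gst_byte_reader_get_remaining (reader) < size - header_size)
+ *       break;                  // incomplete box, wait for more data
+ *     // ... inspect fourcc, then move on to the next box ...
+ *     gst_byte_reader_skip (reader, size - header_size);
+ *   }
+ */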
+
+static void
+gst_isoff_trun_box_clear (GstTrunBox * trun)
+{
+  if (trun->samples)
+    g_array_free (trun->samples, TRUE);
+}
+
+static void
+gst_isoff_tfrf_box_free (GstTfrfBox * tfrf)
+{
+  if (tfrf->entries)
+    g_array_free (tfrf->entries, TRUE);
+
+  g_free (tfrf);
+}
+
+static void
+gst_isoff_traf_box_clear (GstTrafBox * traf)
+{
+  if (traf->trun)
+    g_array_free (traf->trun, TRUE);
+
+  if (traf->tfrf)
+    gst_isoff_tfrf_box_free (traf->tfrf);
+
+  g_free (traf->tfxd);
+  traf->trun = NULL;
+  traf->tfrf = NULL;
+  traf->tfxd = NULL;
+}
+
+static gboolean
+gst_isoff_mfhd_box_parse (GstMfhdBox * mfhd, GstByteReader * reader)
+{
+  guint8 version;
+  guint32 flags;
+
+  if (gst_byte_reader_get_remaining (reader) != 8)
+    return FALSE;
+
+  version = gst_byte_reader_get_uint8_unchecked (reader);
+  if (version != 0)
+    return FALSE;
+
+  flags = gst_byte_reader_get_uint24_be_unchecked (reader);
+  if (flags != 0)
+    return FALSE;
+
+  mfhd->sequence_number = gst_byte_reader_get_uint32_be_unchecked (reader);
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_tfhd_box_parse (GstTfhdBox * tfhd, GstByteReader * reader)
+{
+  memset (tfhd, 0, sizeof (*tfhd));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  tfhd->version = gst_byte_reader_get_uint8_unchecked (reader);
+  if (tfhd->version != 0)
+    return FALSE;
+
+  tfhd->flags = gst_byte_reader_get_uint24_be_unchecked (reader);
+
+  if (!gst_byte_reader_get_uint32_be (reader, &tfhd->track_id))
+    return FALSE;
+
+  if ((tfhd->flags & GST_TFHD_FLAGS_BASE_DATA_OFFSET_PRESENT) &&
+      !gst_byte_reader_get_uint64_be (reader, &tfhd->base_data_offset))
+    return FALSE;
+
+  if ((tfhd->flags & GST_TFHD_FLAGS_SAMPLE_DESCRIPTION_INDEX_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, &tfhd->sample_description_index))
+    return FALSE;
+
+  if ((tfhd->flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_DURATION_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, &tfhd->default_sample_duration))
+    return FALSE;
+
+  if ((tfhd->flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_SIZE_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, &tfhd->default_sample_size))
+    return FALSE;
+
+  if ((tfhd->flags & GST_TFHD_FLAGS_DEFAULT_SAMPLE_FLAGS_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, &tfhd->default_sample_flags))
+    return FALSE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_trun_box_parse (GstTrunBox * trun, GstByteReader * reader)
+{
+  gint i;
+
+  memset (trun, 0, sizeof (*trun));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  trun->version = gst_byte_reader_get_uint8_unchecked (reader);
+  if (trun->version != 0 && trun->version != 1)
+    return FALSE;
+
+  trun->flags = gst_byte_reader_get_uint24_be_unchecked (reader);
+
+  if (!gst_byte_reader_get_uint32_be (reader, &trun->sample_count))
+    return FALSE;
+
+  trun->samples =
+      g_array_sized_new (FALSE, FALSE, sizeof (GstTrunSample),
+      trun->sample_count);
+
+  if ((trun->flags & GST_TRUN_FLAGS_DATA_OFFSET_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, (guint32 *) & trun->data_offset))
+    return FALSE;
+
+  if ((trun->flags & GST_TRUN_FLAGS_FIRST_SAMPLE_FLAGS_PRESENT) &&
+      !gst_byte_reader_get_uint32_be (reader, &trun->first_sample_flags))
+    return FALSE;
+
+  for (i = 0; i < trun->sample_count; i++) {
+    GstTrunSample sample = { 0, };
+
+    if ((trun->flags & GST_TRUN_FLAGS_SAMPLE_DURATION_PRESENT) &&
+        !gst_byte_reader_get_uint32_be (reader, &sample.sample_duration))
+      goto error;
+
+    if ((trun->flags & GST_TRUN_FLAGS_SAMPLE_SIZE_PRESENT) &&
+        !gst_byte_reader_get_uint32_be (reader, &sample.sample_size))
+      goto error;
+
+    if ((trun->flags & GST_TRUN_FLAGS_SAMPLE_FLAGS_PRESENT) &&
+        !gst_byte_reader_get_uint32_be (reader, &sample.sample_flags))
+      goto error;
+
+    if ((trun->flags & GST_TRUN_FLAGS_SAMPLE_COMPOSITION_TIME_OFFSETS_PRESENT)
+        && !gst_byte_reader_get_uint32_be (reader,
+            &sample.sample_composition_time_offset.u))
+      goto error;
+
+    g_array_append_val (trun->samples, sample);
+  }
+
+  return TRUE;
+
+error:
+  gst_isoff_trun_box_clear (trun);
+  return FALSE;
+}
+
+static gboolean
+gst_isoff_tfdt_box_parse (GstTfdtBox * tfdt, GstByteReader * reader)
+{
+  gint8 version;
+
+  memset (tfdt, 0, sizeof (*tfdt));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  version = gst_byte_reader_get_uint8_unchecked (reader);
+
+  if (!gst_byte_reader_skip (reader, 3))
+    return FALSE;
+
+  if (version == 1) {
+    if (!gst_byte_reader_get_uint64_be (reader, &tfdt->decode_time))
+      return FALSE;
+  } else {
+    guint32 dec_time = 0;
+    if (!gst_byte_reader_get_uint32_be (reader, &dec_time))
+      return FALSE;
+    tfdt->decode_time = dec_time;
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_tfxd_box_parse (GstTfxdBox * tfxd, GstByteReader * reader)
+{
+  guint8 version;
+  guint32 flags = 0;
+  guint64 absolute_time = 0;
+  guint64 absolute_duration = 0;
+
+  memset (tfxd, 0, sizeof (*tfxd));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  if (!gst_byte_reader_get_uint8 (reader, &version)) {
+    GST_ERROR ("Error getting box's version field");
+    return FALSE;
+  }
+
+  if (!gst_byte_reader_get_uint24_be (reader, &flags)) {
+    GST_ERROR ("Error getting box's flags field");
+    return FALSE;
+  }
+
+  tfxd->version = version;
+  tfxd->flags = flags;
+
+  if (gst_byte_reader_get_remaining (reader) < ((version & 0x01) ? 16 : 8))
+    return FALSE;
+
+  if (version & 0x01) {
+    gst_byte_reader_get_uint64_be (reader, &absolute_time);
+    gst_byte_reader_get_uint64_be (reader, &absolute_duration);
+  } else {
+    guint32 time = 0;
+    guint32 duration = 0;
+    gst_byte_reader_get_uint32_be (reader, &time);
+    gst_byte_reader_get_uint32_be (reader, &duration);
+    absolute_time = time;
+    absolute_duration = duration;
+  }
+
+  tfxd->time = absolute_time;
+  tfxd->duration = absolute_duration;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_tfrf_box_parse (GstTfrfBox * tfrf, GstByteReader * reader)
+{
+  guint8 version;
+  guint32 flags = 0;
+  guint8 fragment_count = 0;
+  guint8 index = 0;
+
+  memset (tfrf, 0, sizeof (*tfrf));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  if (!gst_byte_reader_get_uint8 (reader, &version)) {
+    GST_ERROR ("Error getting box's version field");
+    return FALSE;
+  }
+
+  if (!gst_byte_reader_get_uint24_be (reader, &flags)) {
+    GST_ERROR ("Error getting box's flags field");
+    return FALSE;
+  }
+
+  tfrf->version = version;
+  tfrf->flags = flags;
+
+  if (!gst_byte_reader_get_uint8 (reader, &fragment_count))
+    return FALSE;
+
+  tfrf->entries_count = fragment_count;
+  tfrf->entries =
+      g_array_sized_new (FALSE, FALSE, sizeof (GstTfrfBoxEntry),
+      tfrf->entries_count);
+
+  for (index = 0; index < fragment_count; index++) {
+    GstTfrfBoxEntry entry = { 0, };
+    guint64 absolute_time = 0;
+    guint64 absolute_duration = 0;
+    if (gst_byte_reader_get_remaining (reader) < ((version & 0x01) ? 16 : 8))
+      return FALSE;
+
+    if (version & 0x01) {
+      if (!gst_byte_reader_get_uint64_be (reader, &absolute_time) ||
+          !gst_byte_reader_get_uint64_be (reader, &absolute_duration)) {
+        return FALSE;
+      }
+    } else {
+      guint32 time = 0;
+      guint32 duration = 0;
+      if (!gst_byte_reader_get_uint32_be (reader, &time) ||
+          !gst_byte_reader_get_uint32_be (reader, &duration)) {
+        return FALSE;
+      }
+      absolute_time = time;
+      absolute_duration = duration;
+    }
+    entry.time = absolute_time;
+    entry.duration = absolute_duration;
+
+    g_array_append_val (tfrf->entries, entry);
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_traf_box_parse (GstTrafBox * traf, GstByteReader * reader)
+{
+  gboolean had_tfhd = FALSE;
+
+  memset (traf, 0, sizeof (*traf));
+  traf->trun = g_array_new (FALSE, FALSE, sizeof (GstTrunBox));
+  g_array_set_clear_func (traf->trun,
+      (GDestroyNotify) gst_isoff_trun_box_clear);
+
+  traf->tfdt.decode_time = GST_CLOCK_TIME_NONE;
+
+  while (gst_byte_reader_get_remaining (reader) > 0) {
+    guint32 fourcc;
+    guint header_size;
+    guint64 size;
+    GstByteReader sub_reader;
+    guint8 extended_type[16] = { 0, };
+
+    if (!gst_isoff_parse_box_header (reader, &fourcc, extended_type,
+            &header_size, &size))
+      goto error;
+    if (gst_byte_reader_get_remaining (reader) < size - header_size)
+      goto error;
+
+    switch (fourcc) {
+      case GST_ISOFF_FOURCC_TFHD:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_tfhd_box_parse (&traf->tfhd, &sub_reader))
+          goto error;
+        had_tfhd = TRUE;
+        break;
+      }
+      case GST_ISOFF_FOURCC_TFDT:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_tfdt_box_parse (&traf->tfdt, &sub_reader))
+          goto error;
+        break;
+      }
+      case GST_ISOFF_FOURCC_TRUN:{
+        GstTrunBox trun;
+
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_trun_box_parse (&trun, &sub_reader))
+          goto error;
+
+        g_array_append_val (traf->trun, trun);
+        break;
+      }
+      case GST_ISOFF_FOURCC_UUID:{
+        /* smooth-streaming specific */
+        if (memcmp (extended_type, tfrf_uuid, 16) == 0) {
+          if (traf->tfrf)
+            gst_isoff_tfrf_box_free (traf->tfrf);
+          traf->tfrf = g_new0 (GstTfrfBox, 1);
+          gst_byte_reader_get_sub_reader (reader, &sub_reader,
+              size - header_size);
+
+          if (!gst_isoff_tfrf_box_parse (traf->tfrf, &sub_reader))
+            goto error;
+        } else if (memcmp (extended_type, tfxd_uuid, 16) == 0) {
+          if (traf->tfxd)
+            g_free (traf->tfxd);
+          traf->tfxd = g_new0 (GstTfxdBox, 1);
+          gst_byte_reader_get_sub_reader (reader, &sub_reader,
+              size - header_size);
+
+          if (!gst_isoff_tfxd_box_parse (traf->tfxd, &sub_reader))
+            goto error;
+        } else {
+          gst_byte_reader_skip (reader, size - header_size);
+        }
+        break;
+      }
+      default:
+        gst_byte_reader_skip (reader, size - header_size);
+        break;
+    }
+  }
+
+  if (!had_tfhd)
+    goto error;
+
+  return TRUE;
+
+error:
+  gst_isoff_traf_box_clear (traf);
+
+  return FALSE;
+}
+
+GstMoofBox *
+gst_isoff_moof_box_parse (GstByteReader * reader)
+{
+  GstMoofBox *moof;
+  gboolean had_mfhd = FALSE;
+  GstByteReader sub_reader;
+
+
+  INITIALIZE_DEBUG_CATEGORY;
+  moof = g_new0 (GstMoofBox, 1);
+  moof->traf = g_array_new (FALSE, FALSE, sizeof (GstTrafBox));
+  g_array_set_clear_func (moof->traf,
+      (GDestroyNotify) gst_isoff_traf_box_clear);
+
+  while (gst_byte_reader_get_remaining (reader) > 0) {
+    guint32 fourcc;
+    guint header_size;
+    guint64 size;
+
+    if (!gst_isoff_parse_box_header (reader, &fourcc, NULL, &header_size,
+            &size))
+      goto error;
+    if (gst_byte_reader_get_remaining (reader) < size - header_size)
+      goto error;
+
+    switch (fourcc) {
+      case GST_ISOFF_FOURCC_MFHD:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_mfhd_box_parse (&moof->mfhd, &sub_reader))
+          goto error;
+        had_mfhd = TRUE;
+        break;
+      }
+      case GST_ISOFF_FOURCC_TRAF:{
+        GstTrafBox traf;
+
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_traf_box_parse (&traf, &sub_reader))
+          goto error;
+
+        g_array_append_val (moof->traf, traf);
+        break;
+      }
+      default:
+        gst_byte_reader_skip (reader, size - header_size);
+        break;
+    }
+  }
+
+  if (!had_mfhd)
+    goto error;
+
+  return moof;
+
+error:
+  gst_isoff_moof_box_free (moof);
+  return NULL;
+}
+
+void
+gst_isoff_moof_box_free (GstMoofBox * moof)
+{
+  g_array_free (moof->traf, TRUE);
+  g_free (moof);
+}
+
+static gboolean
+gst_isoff_mdhd_box_parse (GstMdhdBox * mdhd, GstByteReader * reader)
+{
+  guint8 version;
+
+  memset (mdhd, 0, sizeof (*mdhd));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  version = gst_byte_reader_get_uint8_unchecked (reader);
+
+  if (!gst_byte_reader_skip (reader, 3))
+    return FALSE;
+
+  /* skip {creation, modification}_time, we are not interested in them */
+  if (version == 1) {
+    if (!gst_byte_reader_skip (reader, 16))
+      return FALSE;
+  } else {
+    if (!gst_byte_reader_skip (reader, 8))
+      return FALSE;
+  }
+
+  if (!gst_byte_reader_get_uint32_be (reader, &mdhd->timescale))
+    return FALSE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_hdlr_box_parse (GstHdlrBox * hdlr, GstByteReader * reader)
+{
+  memset (hdlr, 0, sizeof (*hdlr));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  /* version & flag */
+  if (!gst_byte_reader_skip (reader, 4))
+    return FALSE;
+
+  /* pre_defined = 0 */
+  if (!gst_byte_reader_skip (reader, 4))
+    return FALSE;
+
+  if (!gst_byte_reader_get_uint32_le (reader, &hdlr->handler_type))
+    return FALSE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_mdia_box_parse (GstMdiaBox * mdia, GstByteReader * reader)
+{
+  gboolean had_mdhd = FALSE, had_hdlr = FALSE;
+  while (gst_byte_reader_get_remaining (reader) > 0) {
+    guint32 fourcc;
+    guint header_size;
+    guint64 size;
+    GstByteReader sub_reader;
+
+    if (!gst_isoff_parse_box_header (reader, &fourcc, NULL, &header_size,
+            &size))
+      return FALSE;
+    if (gst_byte_reader_get_remaining (reader) < size - header_size)
+      return FALSE;
+
+    switch (fourcc) {
+      case GST_ISOFF_FOURCC_MDHD:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_mdhd_box_parse (&mdia->mdhd, &sub_reader))
+          return FALSE;
+
+        had_mdhd = TRUE;
+        break;
+      }
+      case GST_ISOFF_FOURCC_HDLR:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_hdlr_box_parse (&mdia->hdlr, &sub_reader))
+          return FALSE;
+
+        had_hdlr = TRUE;
+        break;
+      }
+      default:
+        gst_byte_reader_skip (reader, size - header_size);
+        break;
+    }
+  }
+
+  if (!had_mdhd || !had_hdlr)
+    return FALSE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_tkhd_box_parse (GstTkhdBox * tkhd, GstByteReader * reader)
+{
+  guint8 version;
+
+  memset (tkhd, 0, sizeof (*tkhd));
+
+  if (gst_byte_reader_get_remaining (reader) < 4)
+    return FALSE;
+
+  if (!gst_byte_reader_get_uint8 (reader, &version))
+    return FALSE;
+
+  if (!gst_byte_reader_skip (reader, 3))
+    return FALSE;
+
+  /* skip {creation, modification}_time, we are not interested in them */
+  if (version == 1) {
+    if (!gst_byte_reader_skip (reader, 16))
+      return FALSE;
+  } else {
+    if (!gst_byte_reader_skip (reader, 8))
+      return FALSE;
+  }
+
+  if (!gst_byte_reader_get_uint32_be (reader, &tkhd->track_id))
+    return FALSE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_isoff_trak_box_parse (GstTrakBox * trak, GstByteReader * reader)
+{
+  gboolean had_mdia = FALSE, had_tkhd = FALSE;
+  while (gst_byte_reader_get_remaining (reader) > 0) {
+    guint32 fourcc;
+    guint header_size;
+    guint64 size;
+    GstByteReader sub_reader;
+
+    if (!gst_isoff_parse_box_header (reader, &fourcc, NULL, &header_size,
+            &size))
+      return FALSE;
+    if (gst_byte_reader_get_remaining (reader) < size - header_size)
+      return FALSE;
+
+    switch (fourcc) {
+      case GST_ISOFF_FOURCC_MDIA:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_mdia_box_parse (&trak->mdia, &sub_reader))
+          return FALSE;
+
+        had_mdia = TRUE;
+        break;
+      }
+      case GST_ISOFF_FOURCC_TKHD:{
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_tkhd_box_parse (&trak->tkhd, &sub_reader))
+          return FALSE;
+
+        had_tkhd = TRUE;
+        break;
+      }
+      default:
+        gst_byte_reader_skip (reader, size - header_size);
+        break;
+    }
+  }
+
+  if (!had_tkhd || !had_mdia)
+    return FALSE;
+
+  return TRUE;
+}
+
+GstMoovBox *
+gst_isoff_moov_box_parse (GstByteReader * reader)
+{
+  GstMoovBox *moov;
+  gboolean had_trak = FALSE;
+  moov = g_new0 (GstMoovBox, 1);
+  moov->trak = g_array_new (FALSE, FALSE, sizeof (GstTrakBox));
+
+  while (gst_byte_reader_get_remaining (reader) > 0) {
+    guint32 fourcc;
+    guint header_size;
+    guint64 size;
+
+    if (!gst_isoff_parse_box_header (reader, &fourcc, NULL, &header_size,
+            &size))
+      goto error;
+    if (gst_byte_reader_get_remaining (reader) < size - header_size)
+      goto error;
+
+    switch (fourcc) {
+      case GST_ISOFF_FOURCC_TRAK:{
+        GstByteReader sub_reader;
+        GstTrakBox trak;
+
+        gst_byte_reader_get_sub_reader (reader, &sub_reader,
+            size - header_size);
+        if (!gst_isoff_trak_box_parse (&trak, &sub_reader))
+          goto error;
+
+        had_trak = TRUE;
+        g_array_append_val (moov->trak, trak);
+        break;
+      }
+      default:
+        gst_byte_reader_skip (reader, size - header_size);
+        break;
+    }
+  }
+
+  if (!had_trak)
+    goto error;
+
+  return moov;
+
+error:
+  gst_isoff_moov_box_free (moov);
+  return NULL;
+}
+
+void
+gst_isoff_moov_box_free (GstMoovBox * moov)
+{
+  g_array_free (moov->trak, TRUE);
+  g_free (moov);
+}
+
+void
+gst_isoff_sidx_parser_init (GstSidxParser * parser)
+{
+  parser->status = GST_ISOFF_SIDX_PARSER_INIT;
+  parser->cumulative_entry_size = 0;
+  parser->sidx.entries = NULL;
+  parser->sidx.entries_count = 0;
+}
+
+void
+gst_isoff_sidx_parser_clear (GstSidxParser * parser)
+{
+  g_free (parser->sidx.entries);
+  memset (parser, 0, sizeof (*parser));
+
+  gst_isoff_sidx_parser_init (parser);
+}
+
+static void
+gst_isoff_parse_sidx_entry (GstSidxBoxEntry * entry, GstByteReader * reader)
+{
+  guint32 aux;
+
+  aux = gst_byte_reader_get_uint32_be_unchecked (reader);
+  entry->ref_type = aux >> 31;
+  entry->size = aux & 0x7FFFFFFF;
+  entry->duration = gst_byte_reader_get_uint32_be_unchecked (reader);
+  aux = gst_byte_reader_get_uint32_be_unchecked (reader);
+  entry->starts_with_sap = aux >> 31;
+  entry->sap_type = ((aux >> 28) & 0x7);
+  entry->sap_delta_time = aux & 0xFFFFFFF;
+}
+
+GstIsoffParserResult
+gst_isoff_sidx_parser_parse (GstSidxParser * parser,
+    GstByteReader * reader, guint * consumed)
+{
+  GstIsoffParserResult res = GST_ISOFF_PARSER_OK;
+  gsize remaining;
+
+  INITIALIZE_DEBUG_CATEGORY;
+  switch (parser->status) {
+    case GST_ISOFF_SIDX_PARSER_INIT:
+      /* Try again once we have enough data for the FullBox header */
+      if (gst_byte_reader_get_remaining (reader) < 4) {
+        gst_byte_reader_set_pos (reader, 0);
+        break;
+      }
+      parser->sidx.version = gst_byte_reader_get_uint8_unchecked (reader);
+      parser->sidx.flags = gst_byte_reader_get_uint24_le_unchecked (reader);
+
+      parser->status = GST_ISOFF_SIDX_PARSER_HEADER;
+
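+      /* Fall through and parse the sidx header if enough data remains */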
+    case GST_ISOFF_SIDX_PARSER_HEADER:
+      remaining = gst_byte_reader_get_remaining (reader);
+      if (remaining < 12 + (parser->sidx.version == 0 ? 8 : 16)) {
+        break;
+      }
+
+      parser->sidx.ref_id = gst_byte_reader_get_uint32_be_unchecked (reader);
+      parser->sidx.timescale = gst_byte_reader_get_uint32_be_unchecked (reader);
+      if (parser->sidx.version == 0) {
+        parser->sidx.earliest_pts =
+            gst_byte_reader_get_uint32_be_unchecked (reader);
+        parser->sidx.first_offset =
+            gst_byte_reader_get_uint32_be_unchecked (reader);
+      } else {
+        parser->sidx.earliest_pts =
+            gst_byte_reader_get_uint64_be_unchecked (reader);
+        parser->sidx.first_offset =
+            gst_byte_reader_get_uint64_be_unchecked (reader);
+      }
+      /* skip 2 reserved bytes */
+      gst_byte_reader_skip_unchecked (reader, 2);
+      parser->sidx.entries_count =
+          gst_byte_reader_get_uint16_be_unchecked (reader);
+
+      GST_LOG ("Timescale: %" G_GUINT32_FORMAT, parser->sidx.timescale);
+      GST_LOG ("Earliest pts: %" G_GUINT64_FORMAT, parser->sidx.earliest_pts);
+      GST_LOG ("First offset: %" G_GUINT64_FORMAT, parser->sidx.first_offset);
+
+      parser->cumulative_pts =
+          gst_util_uint64_scale_int_round (parser->sidx.earliest_pts,
+          GST_SECOND, parser->sidx.timescale);
+
+      if (parser->sidx.entries_count) {
+        parser->sidx.entries =
+            g_malloc (sizeof (GstSidxBoxEntry) * parser->sidx.entries_count);
+      }
+      parser->sidx.entry_index = 0;
+
+      parser->status = GST_ISOFF_SIDX_PARSER_DATA;
+
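+      /* Fall through and start parsing the entries */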
+    case GST_ISOFF_SIDX_PARSER_DATA:
+      while (parser->sidx.entry_index < parser->sidx.entries_count) {
+        GstSidxBoxEntry *entry =
+            &parser->sidx.entries[parser->sidx.entry_index];
+
+        remaining = gst_byte_reader_get_remaining (reader);
+        if (remaining < 12)
+          break;
+
+        entry->offset = parser->cumulative_entry_size;
+        entry->pts = parser->cumulative_pts;
+        gst_isoff_parse_sidx_entry (entry, reader);
+        entry->duration = gst_util_uint64_scale_int_round (entry->duration,
+            GST_SECOND, parser->sidx.timescale);
+        parser->cumulative_entry_size += entry->size;
+        parser->cumulative_pts += entry->duration;
+
+        GST_LOG ("Sidx entry %d) offset: %" G_GUINT64_FORMAT ", pts: %"
+            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT " - size %"
+            G_GUINT32_FORMAT, parser->sidx.entry_index, entry->offset,
+            GST_TIME_ARGS (entry->pts), GST_TIME_ARGS (entry->duration),
+            entry->size);
+
+        parser->sidx.entry_index++;
+      }
+
+      if (parser->sidx.entry_index == parser->sidx.entries_count)
+        parser->status = GST_ISOFF_SIDX_PARSER_FINISHED;
+      else
+        break;
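+      /* Fall through once all entries have been parsed */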
+    case GST_ISOFF_SIDX_PARSER_FINISHED:
+      parser->sidx.entry_index = 0;
+      res = GST_ISOFF_PARSER_DONE;
+      break;
+  }
+
+  *consumed = gst_byte_reader_get_pos (reader);
+
+  return res;
+}
+
+GstIsoffParserResult
+gst_isoff_sidx_parser_add_buffer (GstSidxParser * parser, GstBuffer * buffer,
+    guint * consumed)
+{
+  GstIsoffParserResult res = GST_ISOFF_PARSER_OK;
+  GstByteReader reader;
+  GstMapInfo info;
+  guint32 fourcc;
+
+  INITIALIZE_DEBUG_CATEGORY;
+  if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
+    *consumed = 0;
+    return GST_ISOFF_PARSER_ERROR;
+  }
+
+  gst_byte_reader_init (&reader, info.data, info.size);
+
+  if (parser->status == GST_ISOFF_SIDX_PARSER_INIT) {
+    if (!gst_isoff_parse_box_header (&reader, &fourcc, NULL, NULL,
+            &parser->size))
+      goto done;
+
+    if (fourcc != GST_ISOFF_FOURCC_SIDX) {
+      res = GST_ISOFF_PARSER_UNEXPECTED;
+      gst_byte_reader_set_pos (&reader, 0);
+      goto done;
+    }
+
+    if (parser->size == 0) {
+      res = GST_ISOFF_PARSER_ERROR;
+      gst_byte_reader_set_pos (&reader, 0);
+      goto done;
+    }
+
+    /* Try again once we have enough data for the FullBox header */
+    if (gst_byte_reader_get_remaining (&reader) < 4) {
+      gst_byte_reader_set_pos (&reader, 0);
+      goto done;
+    }
+  }
+
+  res = gst_isoff_sidx_parser_parse (parser, &reader, consumed);
+
+done:
+  gst_buffer_unmap (buffer, &info);
+  return res;
+}
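+
+/* Usage sketch (editor's illustration, not part of the original patch):
+ * feeding a buffer that contains a complete `sidx` box to the parser and
+ * walking the resulting entries. `buf` is a hypothetical GstBuffer holding
+ * the box data.
+ *
+ *   GstSidxParser parser;
+ *   guint consumed;
+ *   gint i;
+ *
+ *   gst_isoff_sidx_parser_init (&parser);
+ *   if (gst_isoff_sidx_parser_add_buffer (&parser, buf, &consumed) ==
+ *       GST_ISOFF_PARSER_DONE) {
+ *     for (i = 0; i < parser.sidx.entries_count; i++) {
+ *       GstSidxBoxEntry *entry = &parser.sidx.entries[i];
+ *       // entry->offset, entry->pts and entry->duration describe one range
+ *     }
+ *   }
+ *   gst_isoff_sidx_parser_clear (&parser);
+ */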
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/gstisoff.h
new file mode 100644 (file)
index 0000000..43d3375
--- /dev/null
@@ -0,0 +1,299 @@
+/*
+ * ISO File Format parsing library
+ *
+ * gstisoff.h
+ *
+ * Copyright (C) 2015 Samsung Electronics. All rights reserved.
+ *   Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_ISOFF_H__
+#define __GST_ISOFF_H__
+
+#include <gst/gst.h>
+#include <gst/base/base.h>
+
+G_BEGIN_DECLS
+
+#ifndef GST_ISOFF_API
+# ifdef BUILDING_GST_ISOFF
+#  define GST_ISOFF_API GST_API_EXPORT         /* from config.h */
+# else
+#  define GST_ISOFF_API GST_API_IMPORT
+# endif
+#endif
+
+typedef enum {
+  GST_ISOFF_PARSER_OK,
+  GST_ISOFF_PARSER_DONE,
+  GST_ISOFF_PARSER_UNEXPECTED,
+  GST_ISOFF_PARSER_ERROR
+} GstIsoffParserResult;
+
+GST_ISOFF_API
+gboolean gst_isoff_parse_box_header (GstByteReader * reader, guint32 * type, guint8 extended_type[16], guint * header_size, guint64 * size);
+
+#define GST_ISOFF_FOURCC_UUID GST_MAKE_FOURCC('u','u','i','d')
+#define GST_ISOFF_FOURCC_MOOF GST_MAKE_FOURCC('m','o','o','f')
+#define GST_ISOFF_FOURCC_MFHD GST_MAKE_FOURCC('m','f','h','d')
+#define GST_ISOFF_FOURCC_TFHD GST_MAKE_FOURCC('t','f','h','d')
+#define GST_ISOFF_FOURCC_TRUN GST_MAKE_FOURCC('t','r','u','n')
+#define GST_ISOFF_FOURCC_TRAF GST_MAKE_FOURCC('t','r','a','f')
+#define GST_ISOFF_FOURCC_TFDT GST_MAKE_FOURCC('t','f','d','t')
+#define GST_ISOFF_FOURCC_MDAT GST_MAKE_FOURCC('m','d','a','t')
+#define GST_ISOFF_FOURCC_MOOV GST_MAKE_FOURCC('m','o','o','v')
+#define GST_ISOFF_FOURCC_TRAK GST_MAKE_FOURCC('t','r','a','k')
+#define GST_ISOFF_FOURCC_TKHD GST_MAKE_FOURCC('t','k','h','d')
+#define GST_ISOFF_FOURCC_MDIA GST_MAKE_FOURCC('m','d','i','a')
+#define GST_ISOFF_FOURCC_MDHD GST_MAKE_FOURCC('m','d','h','d')
+#define GST_ISOFF_FOURCC_HDLR GST_MAKE_FOURCC('h','d','l','r')
+#define GST_ISOFF_FOURCC_SIDX GST_MAKE_FOURCC('s','i','d','x')
+
+/* handler type */
+#define GST_ISOFF_FOURCC_SOUN GST_MAKE_FOURCC('s','o','u','n')
+#define GST_ISOFF_FOURCC_VIDE GST_MAKE_FOURCC('v','i','d','e')
+
+#define GST_ISOFF_SAMPLE_FLAGS_IS_LEADING(flags)                   (((flags) >> 26) & 0x03)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_DEPENDS_ON(flags)            (((flags) >> 24) & 0x03)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_IS_DEPENDED_ON(flags)        (((flags) >> 22) & 0x03)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_HAS_REDUNDANCY(flags)        (((flags) >> 20) & 0x03)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_PADDING_VALUE(flags)         (((flags) >> 17) & 0x07)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_IS_NON_SYNC_SAMPLE(flags)    (((flags) >> 16) & 0x01)
+#define GST_ISOFF_SAMPLE_FLAGS_SAMPLE_DEGRADATION_PRIORITY(flags)  (((flags) >>  0) & 0x0f)
+
+/* Smooth-Streaming specific boxes */
+typedef struct _GstTfxdBox
+{
+  guint8 version;
+  guint32 flags;
+
+  guint64 time;
+  guint64 duration;
+} GstTfxdBox;
+
+typedef struct _GstTfrfBoxEntry
+{
+  guint64 time;
+  guint64 duration;
+} GstTfrfBoxEntry;
+
+typedef struct _GstTfrfBox
+{
+  guint8 version;
+  guint32 flags;
+
+  gint entries_count;
+  GArray *entries;
+} GstTfrfBox;
+
+/* Common boxes */
+typedef struct _GstMfhdBox
+{
+  guint32 sequence_number;
+} GstMfhdBox;
+
+typedef enum
+{
+  GST_TFHD_FLAGS_BASE_DATA_OFFSET_PRESENT         = 0x000001,
+  GST_TFHD_FLAGS_SAMPLE_DESCRIPTION_INDEX_PRESENT = 0x000002,
+  GST_TFHD_FLAGS_DEFAULT_SAMPLE_DURATION_PRESENT  = 0x000008,
+  GST_TFHD_FLAGS_DEFAULT_SAMPLE_SIZE_PRESENT      = 0x000010,
+  GST_TFHD_FLAGS_DEFAULT_SAMPLE_FLAGS_PRESENT     = 0x000020,
+  GST_TFHD_FLAGS_DURATION_IS_EMPTY                = 0x010000,
+  GST_TFHD_FLAGS_DEFAULT_BASE_IS_MOOF             = 0x020000
+} GstTfhdFlags;
+
+typedef struct _GstTfhdBox
+{
+  guint8 version;
+  GstTfhdFlags flags;
+
+  guint32 track_id;
+
+  /* optional */
+  guint64 base_data_offset;
+  guint32 sample_description_index;
+  guint32 default_sample_duration;
+  guint32 default_sample_size;
+  guint32 default_sample_flags;
+} GstTfhdBox;
+
+typedef enum
+{
+  GST_TRUN_FLAGS_DATA_OFFSET_PRESENT                     = 0x000001,
+  GST_TRUN_FLAGS_FIRST_SAMPLE_FLAGS_PRESENT              = 0x000004,
+  GST_TRUN_FLAGS_SAMPLE_DURATION_PRESENT                 = 0x000100,
+  GST_TRUN_FLAGS_SAMPLE_SIZE_PRESENT                     = 0x000200,
+  GST_TRUN_FLAGS_SAMPLE_FLAGS_PRESENT                    = 0x000400,
+  GST_TRUN_FLAGS_SAMPLE_COMPOSITION_TIME_OFFSETS_PRESENT = 0x000800
+} GstTrunFlags;
+
+typedef struct _GstTrunBox
+{
+  guint8 version;
+  GstTrunFlags flags;
+
+  guint32 sample_count;
+
+  /* optional */
+  gint32 data_offset;
+  guint32 first_sample_flags;
+  GArray *samples;
+} GstTrunBox;
+
+typedef struct _GstTrunSample
+{
+  guint32 sample_duration;
+  guint32 sample_size;
+  guint32 sample_flags;
+
+  union {
+    guint32 u; /* version 0 */
+    gint32  s; /* others */
+  } sample_composition_time_offset;
+} GstTrunSample;
+
+typedef struct _GstTfdtBox
+{
+  guint64 decode_time;
+} GstTfdtBox;
+
+typedef struct _GstTrafBox
+{
+  GstTfhdBox tfhd;
+  GstTfdtBox tfdt;
+  GArray *trun;
+
+  /* smooth-streaming specific */
+  GstTfrfBox *tfrf;
+  GstTfxdBox *tfxd;
+} GstTrafBox;
+
+typedef struct _GstMoofBox
+{
+  GstMfhdBox mfhd;
+  GArray *traf;
+} GstMoofBox;
+
+GST_ISOFF_API
+GstMoofBox * gst_isoff_moof_box_parse (GstByteReader *reader);
+
+GST_ISOFF_API
+void gst_isoff_moof_box_free (GstMoofBox *moof);
+
+typedef struct _GstTkhdBox
+{
+  guint32 track_id;
+} GstTkhdBox;
+
+typedef struct _GstMdhdBox
+{
+  guint32 timescale;
+} GstMdhdBox;
+
+typedef struct _GstHdlrBox
+{
+  guint32 handler_type;
+} GstHdlrBox;
+
+typedef struct _GstMdiaBox
+{
+  GstMdhdBox mdhd;
+  GstHdlrBox hdlr;
+} GstMdiaBox;
+
+typedef struct _GstTrakBox
+{
+  GstTkhdBox tkhd;
+  GstMdiaBox mdia;
+} GstTrakBox;
+
+typedef struct _GstMoovBox
+{
+  GArray *trak;
+} GstMoovBox;
+
+GST_ISOFF_API
+GstMoovBox * gst_isoff_moov_box_parse (GstByteReader *reader);
+
+GST_ISOFF_API
+void gst_isoff_moov_box_free (GstMoovBox *moov);
+
+typedef struct _GstSidxBoxEntry
+{
+  gboolean ref_type;
+  guint32 size;
+  GstClockTime duration;
+  gboolean starts_with_sap;
+  guint8 sap_type;
+  guint32 sap_delta_time;
+
+  guint64 offset;
+  GstClockTime pts;
+} GstSidxBoxEntry;
+
+typedef struct _GstSidxBox
+{
+  guint8 version;
+  guint32 flags;
+
+  guint32 ref_id;
+  guint32 timescale;
+  guint64 earliest_pts;
+  guint64 first_offset;
+
+  gint entry_index;
+  gint entries_count;
+
+  GstSidxBoxEntry *entries;
+} GstSidxBox;
+
+typedef enum _GstSidxParserStatus
+{
+  GST_ISOFF_SIDX_PARSER_INIT,
+  GST_ISOFF_SIDX_PARSER_HEADER,
+  GST_ISOFF_SIDX_PARSER_DATA,
+  GST_ISOFF_SIDX_PARSER_FINISHED
+} GstSidxParserStatus;
+
+typedef struct _GstSidxParser
+{
+  GstSidxParserStatus status;
+
+  guint64 size;
+  guint64 cumulative_entry_size;
+  guint64 cumulative_pts;
+
+  GstSidxBox sidx;
+} GstSidxParser;
+
+GST_ISOFF_API
+void gst_isoff_sidx_parser_init (GstSidxParser * parser);
+
+GST_ISOFF_API
+void gst_isoff_sidx_parser_clear (GstSidxParser * parser);
+
+GST_ISOFF_API
+GstIsoffParserResult gst_isoff_sidx_parser_parse (GstSidxParser * parser, GstByteReader * reader, guint * consumed);
+
+GST_ISOFF_API
+GstIsoffParserResult gst_isoff_sidx_parser_add_buffer (GstSidxParser * parser, GstBuffer * buf, guint * consumed);
+
+G_END_DECLS
+
+#endif /* __GST_ISOFF_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux-util.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux-util.c
new file mode 100644 (file)
index 0000000..813d74c
--- /dev/null
@@ -0,0 +1,1027 @@
+/* GStreamer
+ * Copyright (C) 2016 Jan Schmidt <jan@centricular.com>
+ * Copyright (C) 2016 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * gsthlsdemux-util.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#  include "config.h"
+#endif
+
+#include <stdio.h>
+
+#include <gmodule.h>
+
+#include <gst/gst.h>
+#include <gst/tag/tag.h>
+#include <string.h>
+
+#include "gsthlsdemux.h"
+
+GST_DEBUG_CATEGORY_EXTERN (gst_hls_demux2_debug);
+#define GST_CAT_DEFAULT gst_hls_demux2_debug
+
+
+/* Mpeg-TS Packet */
+#define TS_PACKET_SYNC_BYTE 0x47
+
+#define TS_PACKET_TRANSPORT_ERROR_INDICATOR(packet) \
+  ((packet)[1] & 0x80)
+#define TS_PACKET_PAYLOAD_UNIT_START(packet) \
+  ((packet)[1] & 0x40)
+
+#define TS_PACKET_PID(packet)                          \
+  ((guint16) ((packet)[1] & 0x1f) << 8 | (packet)[2])
+
+#define TS_PACKET_TRANSPORT_SCRAMBLING_CONTROL(packet) \
+  ((packet)[3] & 0xc0)
+#define TS_PACKET_HAS_ADAPTATION_FIELD(packet) \
+  ((packet)[3] & 0x20)
+#define TS_PACKET_HAS_PAYLOAD(packet)          \
+  ((packet)[3] & 0x10)
+#define TS_PACKET_CONTINUITY_COUNTER(packet)   \
+  ((packet)[3] & 0x0f)
+
+#define TS_PACKET_ADAPTATION_FIELD(packet)     \
+  (TS_PACKET_HAS_ADAPTATION_FIELD(packet) ?    \
+   (packet) + 4 : NULL)
+
+/* Adaptation field size. Note: can be 0 */
+#define TS_PACKET_ADAPTATION_FIELD_SIZE(packet)        \
+  (packet)[4]
+
+
+#define TS_PACKET_PAYLOAD_OFFSET(packet)               \
+  (TS_PACKET_HAS_ADAPTATION_FIELD (packet) ?           \
+   4 + TS_PACKET_ADAPTATION_FIELD_SIZE (packet) + 1 :  \
+   4)
+
+#define TS_PACKET_PAYLOAD(packet)                      \
+  (TS_PACKET_HAS_PAYLOAD (packet) ?                    \
+   (packet) + TS_PACKET_PAYLOAD_OFFSET(packet) :       \
+   NULL)
+
+/* PES Packet */
+
+#define PES_IS_VALID(pes) ((pes)[0] == 0x00 && \
+                          (pes)[1] == 0x00 &&  \
+                          (pes)[2] == 0x01)
+
+#define PES_STREAM_ID(pes) ((pes)[3])
+
+#define PES_PACKET_LENGTH(pes)                 \
+  ((guint16) (((pes)[4] << 8) | (pes)[5]))
+
+#define PES_STREAM_TYPE_HAS_HEADER(stream_type) \
+  (stream_type != 0xac)
+
+#define PES_HEADER_DATA_LENGTH(pes) ((pes)[8])
+#define PES_PAYLOAD_DATA_OFFSET(pes) \
+  (9 + PES_HEADER_DATA_LENGTH (pes))
+
+#define PES_HAS_PTS(pes) ((pes)[7] & 0x80)
+#define PES_HAS_DTS(pes) ((pes)[7] & 0x40)
+
+/* SI/PSI Packet */
+
+#define TS_SECTION_POINTER(payload) ((payload)[0])
+#define TS_PACKET_GET_SECTION(payload) ((payload) + TS_SECTION_POINTER(payload))
+
+/* PAT section */
+#define PAT_PROGRAM_OFFSET(pat, idx) \
+  (7 + (idx) * 4)
+#define PAT_PROGRAM_PID_OFFSET(pat, idx) \
+  (PAT_PROGRAM_OFFSET(pat,idx) + 2)
+#define PAT_GET_PROGRAM_PID(pat, idx) \
+  (GST_READ_UINT16_BE ((pat) + PAT_PROGRAM_PID_OFFSET(pat, idx)) & 0x1fff)
+
+static inline gboolean
+read_ts (const guint8 * data, guint64 * target)
+{
+/* sync:4 == 00xx ! pts:3 ! 1 ! pts:15 ! 1 ! pts:15 ! 1 */
+  if ((*data & 0x01) != 0x01)
+    return FALSE;
+  *target = ((guint64) (*data++ & 0x0E)) << 29;
+  *target |= ((guint64) (*data++)) << 22;
+  if ((*data & 0x01) != 0x01)
+    return FALSE;
+  *target |= ((guint64) (*data++ & 0xFE)) << 14;
+  *target |= ((guint64) (*data++)) << 7;
+  if ((*data & 0x01) != 0x01)
+    return FALSE;
+  *target |= ((guint64) (*data++ & 0xFE)) >> 1;
+
+  return TRUE;
+}
+
+#define PES_PTS_OFFSET(pes) (9)
+#define PES_PTS(pes, dest) (read_ts ((pes) + PES_PTS_OFFSET(pes), dest))
+
+#define PES_DTS_OFFSET(pes) (PES_HAS_PTS(pes) ? 9 + 5 : 9)
+#define PES_DTS(pes, dest) (read_ts ((pes) + PES_DTS_OFFSET(pes), dest))
+
+
+/* Check for sync byte, error_indicator == 0 and packet has payload.
+ * Adaptation control field (data[3] & 0x30) may be zero for TS packets with
+ * null PIDs. Still, these streams are valid TS streams (for null packets,
+ * AFC is supposed to be 0x1, but the spec also says decoders should just
+ * discard any packets with AFC = 0x00) */
+#define IS_MPEGTS_HEADER(data) (data[0] == 0x47 && \
+                                (data[1] & 0x80) == 0x00 && \
+                                ((data[3] & 0x30) != 0x00 || \
+                                ((data[3] & 0x30) == 0x00 && (data[1] & 0x1f) == 0x1f && (data[2] & 0xff) == 0xff)))
+
+#define PCRTIME_TO_GSTTIME(t) (((t) * (guint64)1000) / 27)
+#define MPEGTIME_TO_GSTTIME(t) (((t) * (guint64)100000) / 9)
+
+static gboolean
+have_ts_sync (const guint8 * data, guint size, guint packet_size, guint num)
+{
+  while (num-- > 0) {
+    if (size < packet_size)
+      return FALSE;
+    if (!IS_MPEGTS_HEADER (data))
+      return FALSE;
+    data += packet_size;
+    size -= packet_size;
+  }
+  return TRUE;
+}
+
+#define GST_MPEGTS_TYPEFIND_MIN_HEADERS 4
+
+static gint
+find_offset (const guint8 * data, guint size, guint * out_packet_size)
+{
+  guint sync_points = CLAMP (size / 188, GST_MPEGTS_TYPEFIND_MIN_HEADERS, 100);
+  guint off;
+  const gint packet_size = 188;
+
+  /* FIXME: check 192 as well, and maybe also 204, 208 */
+  for (off = 0; off < MIN (size, packet_size); ++off) {
+    if (have_ts_sync (data + off, size - off, packet_size, sync_points)) {
+      *out_packet_size = packet_size;
+      return off;
+    }
+  }
+  return -1;
+}
+
+static gboolean
+handle_pmt (const guint8 * data, guint size, guint packet_size)
+{
+  const guint8 *p = data;
+  guint32 hdr = GST_READ_UINT32_BE (p);
+  guint slen, pcr_pid, pilen;
+
+  GST_MEMDUMP ("PMT", data, size);
+  data = p + 4;
+  if ((hdr & 0x00000020) != 0)  /* has_adaptation_field */
+    data += 1 + p[4];           /* adaptation_field_len */
+  data += 1 + data[0];          /* pointer_field */
+  if (data[0] != 0x02)          /* table_id */
+    return FALSE;
+  //gst_util_dump_mem (data, 8);
+  /* we assume the entire PMT fits into a single packet and this is it */
+  if (data[6] != 0 || data[6] != data[7])
+    return FALSE;
+  slen = GST_READ_UINT16_BE (data + 1) & 0x0FFF;
+  if (slen > (gsize) (p + packet_size - (data + 1 + 2)) || slen < 5 + 2 + 4)
+    return FALSE;
+  data += 3 + 5;
+  slen -= 5;                    /* bytes after section_length field itself */
+  slen -= 4;                    /* crc at end */
+  pcr_pid = GST_READ_UINT16_BE (data) & 0x1fff;
+  if (pcr_pid != 0x1fff) {
+    GST_DEBUG ("pcr_pid: %04x", pcr_pid);
+  }
+  data += 2;
+  /* Skip global descriptors */
+  pilen = GST_READ_UINT16_BE (data + 1) & 0x0FFF;
+  data += 2 + pilen;
+
+
+  return FALSE;
+}
+
+static gboolean
+pat_get_pmt_pid (const guint8 * data, guint size, guint packet_size,
+    gint * pmt_pid)
+{
+  const guint8 *p = data;
+  guint32 hdr = GST_READ_UINT32_BE (p);
+  guint slen;
+
+  data = p + 4;
+  if ((hdr & 0x00000020) != 0)  /* has_adaptation_field */
+    data += 1 + p[4];           /* adaptation_field_len */
+  data += 1 + data[0];          /* pointer_field */
+  if (data[0] != 0)             /* table_id */
+    return FALSE;
+  /* we assume the entire PAT fits into a single packet and this is it */
+  if (data[6] != 0 || data[6] != data[7])
+    return FALSE;
+  slen = GST_READ_UINT16_BE (data + 1) & 0x0FFF;
+  if (slen > (gsize) (p + packet_size - (data + 1 + 2)) || slen < 5 + 4 + 4)
+    return FALSE;
+  data += 3 + 5;
+  slen -= 5;                    /* bytes after section_length field itself */
+  slen -= 4;                    /* crc at end */
+  while (slen >= 4) {
+    guint program_num = GST_READ_UINT16_BE (data);
+    guint val = GST_READ_UINT16_BE (data + 2) & 0x1fff;
+    if (program_num != 0) {
+      GST_DEBUG ("  program %04x: pmt_pid : %04x", program_num, val);
+      *pmt_pid = val;
+      return TRUE;
+    }
+    data += 4;
+    slen -= 4;
+  }
+
+  return FALSE;
+}
+
+static GstClockTime
+get_first_mpegts_time (const guint8 * data, gsize size, guint packet_size)
+{
+  GstClockTime internal_time = GST_CLOCK_TIME_NONE;
+  const guint8 *p;
+  gint pmt_pid = -1;
+
+  for (p = data; size >= packet_size; p += packet_size, size -= packet_size) {
+    if (p[0] != TS_PACKET_SYNC_BYTE) {
+      GST_WARNING ("Lost sync");
+      break;
+    }
+
+    /* We only care about start packets which have some form of payload (pes or
+       section) */
+    if (TS_PACKET_PAYLOAD_UNIT_START (p) && TS_PACKET_HAS_PAYLOAD (p)) {
+      guint16 pid;
+      const guint8 *payload;
+      const guint8 *afc;
+
+      /* Skip packets which have error indicator set or are scrambled */
+      if (G_UNLIKELY (TS_PACKET_TRANSPORT_ERROR_INDICATOR (p) ||
+              TS_PACKET_TRANSPORT_SCRAMBLING_CONTROL (p)))
+        continue;
+
+      pid = TS_PACKET_PID (p);
+      payload = TS_PACKET_PAYLOAD (p);
+      afc = TS_PACKET_ADAPTATION_FIELD (p);
+
+      GST_LOG ("PID 0x%04x", pid);
+      if (afc && afc[0])
+        GST_MEMDUMP ("afc", afc, afc[0]);
+      GST_MEMDUMP ("payload", payload, 32);
+      if (pmt_pid != -1 && PES_IS_VALID (payload)) {
+        guint64 ts;
+        GstClockTime pts, dts;
+
+        pts = dts = GST_CLOCK_TIME_NONE;
+
+        GST_DEBUG ("PID 0x%04x stream_id 0x%02x PES start", pid,
+            PES_STREAM_ID (payload));
+        GST_MEMDUMP ("PES data", payload + PES_PAYLOAD_DATA_OFFSET (payload),
+            32);
+
+        /* Grab PTS/DTS */
+        if (PES_HAS_PTS (payload) && PES_PTS (payload, &ts)) {
+          pts = MPEGTIME_TO_GSTTIME (ts);
+          GST_LOG ("PID 0x%04x PTS %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+              pid, ts, GST_TIME_ARGS (pts));
+        }
+        if (PES_HAS_DTS (payload) && PES_DTS (payload, &ts)) {
+          dts = MPEGTIME_TO_GSTTIME (ts);
+          GST_LOG ("PID 0x%04x DTS %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+              pid, ts, GST_TIME_ARGS (dts));
+        }
+
+        /* Pick the lowest value */
+        if (GST_CLOCK_TIME_IS_VALID (dts)) {
+          if (GST_CLOCK_TIME_IS_VALID (pts)) {
+            /* Only take the PTS if it's lower than the dts and does not differ
+             * by more than a second (which would indicate bogus values) */
+            if (pts < dts && ABS (pts - dts) < GST_SECOND)
+              internal_time = pts;
+            else
+              internal_time = dts;
+          } else {
+            internal_time = dts;
+          }
+          goto out;
+        } else if (GST_CLOCK_TIME_IS_VALID (pts)) {
+          internal_time = pts;
+          goto out;
+        }
+      } else if (pid == 0x00) {
+        GST_DEBUG ("PAT !");
+        if (!pat_get_pmt_pid (p, packet_size, packet_size, &pmt_pid)) {
+          GST_WARNING ("Invalid PAT");
+          goto out;
+        }
+      } else if (pmt_pid != -1 && pid == pmt_pid) {
+        GST_DEBUG ("PMT !");
+        /* FIXME : Grab the list of *actual* elementary stream PID to make sure
+         * we have checked the first PTS of each stream (and not just the first
+         * one we saw, which might not be the smallest) */
+        handle_pmt (p, packet_size, packet_size);
+      }
+    }
+  }
+
+out:
+  return internal_time;
+}
+
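+/* Locate the first internal (MPEG-TS) timestamp of the downloaded fragment
+ * and feed it to gst_hlsdemux_handle_internal_time() so the demuxer can check
+ * whether its stream time estimation is in sync with the actual content. */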
+GstHLSParserResult
+gst_hlsdemux_handle_content_mpegts (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, gboolean draining, GstBuffer ** buffer)
+{
+  GstMapInfo info;
+  gint offset;
+  const guint8 *data;
+  GstClockTime internal_time = GST_CLOCK_TIME_NONE;
+  guint packet_size;
+  gsize size;
+
+  if (!gst_buffer_map (*buffer, &info, GST_MAP_READ))
+    return GST_HLS_PARSER_RESULT_ERROR;
+
+  data = info.data;
+  size = info.size;
+
+  offset = find_offset (data, size, &packet_size);
+  if (offset < 0) {
+    gst_buffer_unmap (*buffer, &info);
+    return GST_HLS_PARSER_RESULT_ERROR;
+  }
+
+  GST_LOG ("TS packet start offset: %d", offset);
+
+  data += offset;
+  size -= offset;
+
+  internal_time = get_first_mpegts_time (data, size, packet_size);
+
+  GST_DEBUG_OBJECT (hls_stream, "Using internal time %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (internal_time));
+
+  gst_buffer_unmap (*buffer, &info);
+
+  if (!GST_CLOCK_TIME_IS_VALID (internal_time))
+    return GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+
+  /* We have the first internal time, figure out if we are in sync or not */
+  return gst_hlsdemux_handle_internal_time (demux, hls_stream, internal_time);
+}
+
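+/* Parse the ISOBMFF boxes of the fragment up to the first `moof`/`mdat`,
+ * extract the smallest track decode time (tfdt, scaled by the track
+ * timescale) and use it to validate the stream time estimation. */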
+GstHLSParserResult
+gst_hlsdemux_handle_content_isobmff (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, gboolean draining, GstBuffer ** buffer)
+{
+  GstMapInfo info;
+  GstByteReader br, sub;
+  guint32 box_type;
+  guint header_size;
+  guint64 box_size;
+  GstHLSParserResult ret = GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+  GstClockTime smallest_ts = GST_CLOCK_TIME_NONE;
+
+  if (!gst_buffer_map (*buffer, &info, GST_MAP_READ))
+    return GST_HLS_PARSER_RESULT_ERROR;
+
+  gst_byte_reader_init (&br, info.data, info.size);
+
+  while (gst_byte_reader_get_remaining (&br) &&
+      gst_isoff_parse_box_header (&br, &box_type, NULL, &header_size,
+          &box_size)) {
+    GST_DEBUG ("box %" GST_FOURCC_FORMAT " size:%" G_GUINT64_FORMAT,
+        GST_FOURCC_ARGS (box_type), box_size);
+
+    GST_MEMDUMP ("box content", br.data + br.byte, MIN (256,
+            box_size - header_size));
+
+    switch (box_type) {
+      case GST_ISOFF_FOURCC_MOOV:
+      {
+        GstMoovBox *moov;
+        gst_byte_reader_get_sub_reader (&br, &sub, box_size - header_size);
+        moov = gst_isoff_moov_box_parse (&sub);
+
+        if (moov) {
+          GST_DEBUG ("Got moov box");
+          if (hls_stream->moov)
+            gst_isoff_moov_box_free (hls_stream->moov);
+          hls_stream->moov = moov;
+        }
+        break;
+      }
+      case GST_ISOFF_FOURCC_MOOF:
+      {
+        GstMoofBox *moof;
+
+        gst_byte_reader_get_sub_reader (&br, &sub, box_size - header_size);
+
+        moof = gst_isoff_moof_box_parse (&sub);
+
+        if (moof) {
+          guint i, j;
+          GST_DEBUG ("Got moof box");
+          /* Use the track information from stream->moov */
+          for (i = 0; i < hls_stream->moov->trak->len; i++) {
+            GstTrakBox *trak =
+                &g_array_index (hls_stream->moov->trak, GstTrakBox, i);
+            GST_LOG ("trak #%d %p", i, trak);
+            for (j = 0; j < moof->traf->len; j++) {
+              GstTrafBox *traf = &g_array_index (moof->traf, GstTrafBox, j);
+              if (traf->tfhd.track_id == trak->tkhd.track_id) {
+                GstClockTime ts = 0;
+                guint64 decode_time = traf->tfdt.decode_time;
+
+                if (decode_time != GST_CLOCK_TIME_NONE)
+                  ts = gst_util_uint64_scale (decode_time, GST_SECOND,
+                      trak->mdia.mdhd.timescale);
+
+                GST_LOG ("Found decode_time %" GST_TIME_FORMAT " for trak %d",
+                    GST_TIME_ARGS (ts), traf->tfhd.track_id);
+                if (smallest_ts == GST_CLOCK_TIME_NONE || ts < smallest_ts)
+                  smallest_ts = ts;
+              }
+            }
+          }
+          gst_isoff_moof_box_free (moof);
+        } else {
+          GST_WARNING ("Failed to parse moof");
+        }
+        if (smallest_ts != GST_CLOCK_TIME_NONE)
+          goto out;
+        break;
+      }
+      case GST_ISOFF_FOURCC_MDAT:
+        GST_DEBUG ("Reached `mdat`, returning");
+        goto out;
+        break;
+      default:
+        GST_LOG ("Skipping unhandled box %" GST_FOURCC_FORMAT,
+            GST_FOURCC_ARGS (box_type));
+        gst_byte_reader_skip (&br, box_size - header_size);
+        break;
+    }
+
+  }
+
+out:
+  gst_buffer_unmap (*buffer, &info);
+
+  if (smallest_ts != GST_CLOCK_TIME_NONE) {
+    ret = gst_hlsdemux_handle_internal_time (demux, hls_stream, smallest_ts);
+  }
+
+  return ret;
+}
+
+
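+/* Extract the `com.apple.streaming.transportStreamTimestamp` PRIV frame from
+ * an ID3v2 tag at the start of the fragment and use the 33-bit MPEG-TS
+ * timestamp it carries as the internal time of the fragment. */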
+GstHLSParserResult
+gst_hlsdemux_handle_content_id3 (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, gboolean draining, GstBuffer ** buffer)
+{
+  GstMapInfo info;
+  guint32 tag_size;
+  gsize size;
+  GstTagList *taglist;
+  GstSample *priv_data = NULL;
+  GstBuffer *tag_buf;
+  guint64 pts;
+  GstHLSParserResult ret = GST_HLS_PARSER_RESULT_DONE;
+  GstClockTime internal;
+
+  /* We need at least 10 bytes, starting with "ID3" for the header */
+  size = gst_buffer_get_size (*buffer);
+  if (size < 10)
+    return GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+
+  /* Read the tag size */
+  tag_size = gst_tag_get_id3v2_tag_size (*buffer);
+
+  /* Check we've collected that much */
+  if (size < tag_size)
+    return GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+
+  /* Parse the tag */
+  taglist = gst_tag_list_from_id3v2_tag (*buffer);
+  if (taglist == NULL) {
+    return GST_HLS_PARSER_RESULT_ERROR; /* Invalid tag, stop trying */
+  }
+
+  /* Extract the timestamps */
+  if (!gst_tag_list_get_sample (taglist, GST_TAG_PRIVATE_DATA, &priv_data))
+    goto out;
+
+  if (!g_str_equal ("com.apple.streaming.transportStreamTimestamp",
+          gst_structure_get_string (gst_sample_get_info (priv_data), "owner")))
+    goto out;
+
+  /* OK, now as per section 3, the tag contains a 33-bit PCR inside a 64-bit
+   * BE-word */
+  tag_buf = gst_sample_get_buffer (priv_data);
+  if (tag_buf == NULL)
+    goto out;
+
+  if (!gst_buffer_map (tag_buf, &info, GST_MAP_READ))
+    goto out;
+  GST_MEMDUMP ("id3 tag", info.data, info.size);
+
+  pts = GST_READ_UINT64_BE (info.data);
+  internal = MPEGTIME_TO_GSTTIME (pts);
+
+  GST_LOG ("Got internal PTS from ID3: %" G_GUINT64_FORMAT " (%" GST_TIME_FORMAT
+      ")", pts, GST_TIME_ARGS (internal));
+
+  gst_buffer_unmap (tag_buf, &info);
+
+  ret = gst_hlsdemux_handle_internal_time (demux, hls_stream, internal);
+
+out:
+  if (priv_data)
+    gst_sample_unref (priv_data);
+  if (taglist)
+    gst_tag_list_unref (taglist);
+
+  return ret;
+}
+
+/* Grabs the next numerical value from the bytereader, skipping any spaces.
+ *
+ * It will stop/return at the next non-digit/non-space position */
+static gboolean
+byte_reader_get_next_uint_string (GstByteReader * br, guint * out)
+{
+  guint value = 0;
+  gboolean res = FALSE;
+
+  while (gst_byte_reader_get_remaining (br)) {
+    guint8 d = gst_byte_reader_peek_uint8_unchecked (br);
+
+    if (g_ascii_isdigit (d)) {
+      value = value * 10 + (d - '0');
+      res = TRUE;
+    } else if (d != ' ' && d != '\t') {
+      /* we're done and not advancing */
+      break;
+    }
+    gst_byte_reader_skip_unchecked (br, 1);
+  }
+
+  if (res)
+    *out = value;
+
+  return res;
+}
+
+/* Grabs the next numerical value from the bytereader, skipping any spaces.
+ *
+ * It will stop/return at the next non-digit/non-space position */
+static gboolean
+byte_reader_get_next_uint64_string (GstByteReader * br, guint64 * out)
+{
+  guint64 value = 0;
+  gboolean res = FALSE;
+
+  while (gst_byte_reader_get_remaining (br)) {
+    guint8 d = gst_byte_reader_peek_uint8_unchecked (br);
+
+    if (g_ascii_isdigit (d)) {
+      value = value * 10 + (d - '0');
+      res = TRUE;
+    } else if (d != ' ' && d != '\t') {
+      /* we're done and not advancing */
+      break;
+    }
+    gst_byte_reader_skip_unchecked (br, 1);
+  }
+
+  if (res)
+    *out = value;
+
+  return res;
+}
+
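+/* Parse a WebVTT timestamp of the form [HH:]MM:SS.mmm from the bytereader
+ * into a GstClockTime. If @remainder is provided it will point to the text
+ * following the timestamp (or NULL if the reader is exhausted). */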
+static gboolean
+parse_webvtt_time (GstByteReader * br, GstClockTime * t,
+    const gchar ** remainder)
+{
+  GstClockTime val = 0;
+  gboolean res = FALSE;
+
+  while (!res && gst_byte_reader_get_remaining (br)) {
+    guint numval;
+    if (byte_reader_get_next_uint_string (br, &numval)) {
+      /* Don't peek past the end of the data; if the reader is exhausted the
+       * value we just read is the milliseconds component */
+      guint8 next = gst_byte_reader_get_remaining (br) ?
+          gst_byte_reader_peek_uint8_unchecked (br) : 0;
+
+      if (next == ':' || next == '.') {
+        /* value was hours, minutes or seconds */
+        val = val * 60 + numval;
+        gst_byte_reader_skip (br, 1);
+      } else {
+        /* Reached the milliseconds, convert to GstClockTime */
+        val = val * GST_SECOND + numval * GST_MSECOND;
+        res = TRUE;
+      }
+    }
+  }
+
+  if (res) {
+    *t = val;
+    if (remainder) {
+      if (gst_byte_reader_get_remaining (br))
+        *remainder = (const gchar *) gst_byte_reader_peek_data_unchecked (br);
+      else
+        *remainder = NULL;
+    }
+  }
+
+  return res;
+}
+
+static inline void
+br_skipwhitespace (GstByteReader * br)
+{
+  while (gst_byte_reader_get_remaining (br)) {
+    guint8 d = gst_byte_reader_peek_uint8_unchecked (br);
+    if (d != ' ' && d != '\t')
+      return;
+    gst_byte_reader_skip_unchecked (br, 1);
+  }
+}
+
+/* Returns TRUE if br starts with str, advancing the reader past it.
+ *
+ * If skip_ws is TRUE, any spaces/tabs before and after str are skipped */
+static gboolean
+br_startswith (GstByteReader * br, const gchar * str, gboolean skip_ws)
+{
+  guint len = strlen (str);
+  const guint8 *data;
+
+  if (skip_ws)
+    br_skipwhitespace (br);
+  if (!gst_byte_reader_peek_data (br, len, &data))
+    return FALSE;
+  if (strncmp ((gchar *) data, str, len))
+    return FALSE;
+  gst_byte_reader_skip_unchecked (br, len);
+  if (skip_ws)
+    br_skipwhitespace (br);
+
+  return TRUE;
+}
+
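+/* Parse a WebVTT "X-TIMESTAMP-MAP=" header line, which maps a WebVTT cue
+ * (LOCAL) time to an MPEG-TS time. Both orderings (MPEGTS first or LOCAL
+ * first) are accepted, and the MPEGTS value is converted to GstClockTime. */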
+static gboolean
+gst_hls_demux_webvtt_read_x_timestamp_map (gchar * data, GstClockTime * local,
+    GstClockTime * mpegts)
+{
+  GstByteReader br;
+
+  gst_byte_reader_init (&br, (guint8 *) data, strlen (data));
+
+  if (!br_startswith (&br, "X-TIMESTAMP-MAP=", FALSE))
+    return FALSE;
+
+  if (br_startswith (&br, "MPEGTS:", TRUE)) {
+    if (!byte_reader_get_next_uint64_string (&br, mpegts))
+      return FALSE;
+    /* Convert to GstClockTime */
+    *mpegts = MPEGTIME_TO_GSTTIME (*mpegts);
+    if (!br_startswith (&br, ",", TRUE))
+      return FALSE;
+    if (!br_startswith (&br, "LOCAL:", TRUE))
+      return FALSE;
+    if (!parse_webvtt_time (&br, local, NULL))
+      return FALSE;
+  } else if (br_startswith (&br, "LOCAL:", TRUE)) {
+    if (!parse_webvtt_time (&br, local, NULL))
+      return FALSE;
+    if (!br_startswith (&br, ",", TRUE))
+      return FALSE;
+    if (!br_startswith (&br, "MPEGTS:", TRUE))
+      return FALSE;
+    if (!byte_reader_get_next_uint64_string (&br, mpegts))
+      return FALSE;
+    /* Convert to GstClockTime */
+    *mpegts = MPEGTIME_TO_GSTTIME (*mpegts);
+  } else {
+    return FALSE;
+  }
+
+  GST_DEBUG ("local time:%" GST_TIME_FORMAT ", mpegts time:%" GST_TIME_FORMAT,
+      GST_TIME_ARGS (*local), GST_TIME_ARGS (*mpegts));
+
+  return TRUE;
+}
+
+static gboolean
+utf8_string_contains_alnum (gchar * string)
+{
+  gunichar c;
+
+  while ((c = g_utf8_get_char (string))) {
+    if (g_unichar_isalnum (c))
+      return TRUE;
+    string = g_utf8_next_char (string);
+  }
+
+  return FALSE;
+}
+
+#define T_H(t) ((t) / (GST_SECOND * 60 * 60))
+#define T_M(t) ((t) / (GST_SECOND * 60) % 60)
+#define T_S(t) ((t) / GST_SECOND % 60)
+#define WEBVTT_TIME_FORMAT "02u:%02u:%02u.%03u"
+#define WEBVTT_TIME_ARGS(t)                       \
+  (guint) ((t) / (GST_SECOND * 60 * 60)),         \
+  (guint) ((t) / (GST_SECOND * 60) % 60),         \
+  (guint) ((t) / GST_SECOND % 60),                \
+  (guint) ((t) / GST_MSECOND % 1000)
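+/* Parse a WebVTT cue timing line of the form
+ * "<start> --> <stop> [cue settings]", returning the start/stop times and a
+ * pointer to the remaining cue settings (if any). */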
+static gboolean
+process_webvtt_cue_timing_setting_line (const gchar * input,
+    GstClockTime * start, GstClockTime * stop, const gchar ** cue_settings)
+{
+  GstByteReader br;
+
+  gst_byte_reader_init (&br, (guint8 *) input, strlen (input));
+
+  /* Handle cue timing start */
+  if (!parse_webvtt_time (&br, start, NULL))
+    return FALSE;
+
+  /* --> */
+  if (gst_byte_reader_get_remaining (&br) < 15 ||
+      g_ascii_strncasecmp ((const gchar *)
+          gst_byte_reader_peek_data_unchecked (&br), "-->", 3))
+    return FALSE;
+
+  gst_byte_reader_skip (&br, 4);
+
+  /* Handle cue timing stop */
+  if (!parse_webvtt_time (&br, stop, cue_settings))
+    return FALSE;
+
+  return TRUE;
+}
+
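+/* Convert a WebVTT cue time to stream time, using the X-TIMESTAMP-MAP values
+ * when present (otherwise the cue times are assumed to already be MPEG-TS
+ * based). Negative results are clamped to 0 since WebVTT only uses positive
+ * times. */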
+static GstClockTimeDiff
+convert_webvtt_to_stream_time (GstHLSTimeMap * map, GstClockTime localtime,
+    GstClockTime mpegtime, GstClockTime vtt_value)
+{
+  GstClockTimeDiff res;
+
+  if (localtime == GST_CLOCK_TIME_NONE || mpegtime == GST_CLOCK_TIME_NONE) {
+    GST_DEBUG ("No X-TIMESTAMP-MAP, assuming values are MPEG-TS values");
+    res = gst_hls_internal_to_stream_time (map, vtt_value);
+
+    /* VTT only uses positive values */
+    if (res < 0)
+      res = 0;
+  } else {
+    GST_DEBUG ("Converting %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (vtt_value + mpegtime - localtime));
+    res =
+        gst_hls_internal_to_stream_time (map, vtt_value + mpegtime - localtime);
+    if (res == GST_CLOCK_STIME_NONE) {
+      GST_WARNING ("Couldn't convert value, using original value %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (vtt_value));
+      res = vtt_value;
+    } else if (res < 0) {
+      res = 0;
+    }
+  }
+
+  return res;
+}
+
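+/* Rewrite a full WebVTT fragment: cue times are shifted to stream time (plus
+ * the stream presentation offset), out-of-bounds cues trigger a re-evaluation
+ * of the current segment position, and fragments without any cue text are
+ * marked DROPPABLE and timestamped to cover the whole segment. */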
+GstHLSParserResult
+gst_hlsdemux_handle_content_webvtt (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, gboolean draining, GstBuffer ** buffer)
+{
+  GstHLSParserResult ret = GST_HLS_PARSER_RESULT_DONE;
+  gchar *original_content;
+  guint i, nb;
+  gchar **original_lines;
+  GstClockTime localtime = GST_CLOCK_TIME_NONE;
+  GstClockTime mpegtime = GST_CLOCK_TIME_NONE;
+  GstClockTimeDiff low_stream_time = GST_CLOCK_STIME_NONE;
+  GstClockTimeDiff high_stream_time = GST_CLOCK_STIME_NONE;
+  gboolean found_timing = FALSE;
+  gboolean found_text = FALSE;
+  GPtrArray *builder;
+  GstM3U8MediaSegment *current_segment = hls_stream->current_segment;
+  GstClockTimeDiff segment_start, segment_end;
+  GstClockTimeDiff tolerance;
+  gboolean out_of_bounds = FALSE;
+  GstHLSTimeMap *map;
+
+  /* We only process full webvtt fragments */
+  if (!draining)
+    return GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+
+  original_content = gst_hls_buf_to_utf8_text (*buffer);
+
+  if (!original_content)
+    return GST_HLS_PARSER_RESULT_ERROR;
+
+  segment_start = current_segment->stream_time;
+  segment_end = segment_start + current_segment->duration;
+  tolerance = MAX (current_segment->duration / 2, 500 * GST_MSECOND);
+
+  map = gst_hls_find_time_map (demux, current_segment->discont_sequence);
+
+  builder = g_ptr_array_new_with_free_func (g_free);
+
+  original_lines = g_strsplit_set (original_content, "\n\r", 0);
+  nb = g_strv_length (original_lines);
+
+  for (i = 0; i < nb; i++) {
+    gchar *line = original_lines[i];
+
+    GST_LOG ("Line: %s", line);
+
+    if (g_str_has_prefix (line, "X-TIMESTAMP-MAP=")) {
+      if (!gst_hls_demux_webvtt_read_x_timestamp_map (line, &localtime,
+              &mpegtime)) {
+        GST_WARNING ("webvtt timestamp map isn't valid");
+        ret = GST_HLS_PARSER_RESULT_ERROR;
+        goto out;
+      }
+      g_ptr_array_add (builder, g_strdup (line));
+    } else if (strstr (line, " --> ")) {
+      GstClockTime start, stop;
+      const gchar *leftover;
+      if (process_webvtt_cue_timing_setting_line (line, &start, &stop,
+              &leftover)) {
+        GstClockTimeDiff start_stream, stop_stream;
+        gchar *newline;
+
+        GST_LOG ("Found time line %" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+        start_stream =
+            convert_webvtt_to_stream_time (map, localtime, mpegtime, start);
+        stop_stream =
+            convert_webvtt_to_stream_time (map, localtime, mpegtime, stop);
+
+        GST_LOG ("Stream time %" GST_STIME_FORMAT " --> %" GST_STIME_FORMAT,
+            GST_STIME_ARGS (start_stream), GST_STIME_ARGS (stop_stream));
+
+        if (stop_stream < (segment_start - tolerance) ||
+            start_stream > (segment_end + tolerance)) {
+          GST_WARNING ("Out of bounds");
+          out_of_bounds = TRUE;
+        }
+        if (low_stream_time == GST_CLOCK_STIME_NONE
+            || stop_stream < low_stream_time)
+          low_stream_time = stop_stream;
+        if (high_stream_time == GST_CLOCK_STIME_NONE
+            || start_stream > high_stream_time)
+          high_stream_time = start_stream;
+
+        /* Apply the stream presentation offset */
+        start_stream += hls_stream->presentation_offset;
+        stop_stream += hls_stream->presentation_offset;
+
+        /* Create the time-shifted WebVTT cue line */
+        if (leftover) {
+          newline =
+              g_strdup_printf ("%" WEBVTT_TIME_FORMAT " --> %"
+              WEBVTT_TIME_FORMAT " %s", WEBVTT_TIME_ARGS (start_stream),
+              WEBVTT_TIME_ARGS (stop_stream), leftover);
+        } else {
+          newline =
+              g_strdup_printf ("%" WEBVTT_TIME_FORMAT " --> %"
+              WEBVTT_TIME_FORMAT, WEBVTT_TIME_ARGS (start_stream),
+              WEBVTT_TIME_ARGS (stop_stream));
+        }
+        GST_LOG ("Generated line '%s'", newline);
+        g_ptr_array_add (builder, newline);
+        found_timing = TRUE;
+      } else {
+        GST_WARNING ("Failed to parse time line '%s'", line);
+        /* Abort ? */
+      }
+    } else if (found_timing && !found_text) {
+      gchar *linecopy = g_strdup (line);
+      g_ptr_array_add (builder, linecopy);
+      if (utf8_string_contains_alnum (linecopy)) {
+        GST_DEBUG ("Non-empty line '%s'", line);
+        found_text = TRUE;
+      }
+    } else {
+      g_ptr_array_add (builder, g_strdup (line));
+    }
+  }
+
+out:
+  if (ret) {
+    gchar *newfile;
+    /* Add NULL-terminator to string list */
+    g_ptr_array_add (builder, NULL);
+    newfile = g_strjoinv ("\n", (gchar **) builder->pdata);
+    GST_DEBUG ("newfile:\n%s", newfile);
+    gst_buffer_unref (*buffer);
+    *buffer = gst_buffer_new_wrapped (newfile, strlen (newfile));
+  }
+
+  GST_DEBUG_OBJECT (hls_stream,
+      "Stream time %" GST_STIME_FORMAT " -> %" GST_STIME_FORMAT,
+      GST_STIME_ARGS (low_stream_time), GST_STIME_ARGS (high_stream_time));
+
+  g_ptr_array_unref (builder);
+
+  g_strfreev (original_lines);
+  g_free (original_content);
+
+  if (out_of_bounds) {
+    GstM3U8MediaSegment *candidate_segment;
+
+    /* The computed stream time falls outside of the guesstimated stream time,
+     * reassess which segment we really are in */
+    GST_WARNING ("Cue %" GST_STIME_FORMAT " -> %" GST_STIME_FORMAT
+        " is outside of segment %" GST_STIME_FORMAT " -> %"
+        GST_STIME_FORMAT, GST_STIME_ARGS (low_stream_time),
+        GST_STIME_ARGS (high_stream_time),
+        GST_STIME_ARGS (current_segment->stream_time),
+        GST_STIME_ARGS (current_segment->stream_time +
+            current_segment->duration));
+
+    candidate_segment =
+        gst_hls_media_playlist_seek (hls_stream->playlist, TRUE,
+        GST_SEEK_FLAG_SNAP_NEAREST, low_stream_time);
+    if (candidate_segment) {
+      g_assert (candidate_segment != current_segment);
+      GST_DEBUG_OBJECT (hls_stream,
+          "Stream time corresponds to segment %" GST_STIME_FORMAT
+          " duration %" GST_TIME_FORMAT,
+          GST_STIME_ARGS (candidate_segment->stream_time),
+          GST_TIME_ARGS (candidate_segment->duration));
+      /* Recalculate everything and ask parent class to restart */
+      hls_stream->current_segment->stream_time = candidate_segment->stream_time;
+      gst_hls_media_playlist_recalculate_stream_time (hls_stream->playlist,
+          hls_stream->current_segment);
+      gst_m3u8_media_segment_unref (candidate_segment);
+    }
+  }
+
+  if (!found_text) {
+    GST_DEBUG_OBJECT (hls_stream, "Replacing buffer with droppable buffer");
+
+    GST_BUFFER_PTS (*buffer) =
+        current_segment->stream_time + hls_stream->presentation_offset;
+    GST_BUFFER_DURATION (*buffer) = current_segment->duration;
+
+    gst_buffer_set_flags (*buffer, GST_BUFFER_FLAG_DROPPABLE);
+  }
+
+  return ret;
+}
+
+/* Get a utf8-validated string of the contents of the buffer */
+gchar *
+gst_hls_buf_to_utf8_text (GstBuffer * buf)
+{
+  GstMapInfo info;
+  gchar *playlist;
+
+  if (!gst_buffer_map (buf, &info, GST_MAP_READ))
+    goto map_error;
+
+  if (!g_utf8_validate ((gchar *) info.data, info.size, NULL))
+    goto validate_error;
+
+  /* alloc size + 1 to end with a null character */
+  playlist = g_malloc0 (info.size + 1);
+  memcpy (playlist, info.data, info.size);
+
+  gst_buffer_unmap (buf, &info);
+  return playlist;
+
+validate_error:
+  gst_buffer_unmap (buf, &info);
+map_error:
+  return NULL;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.c
new file mode 100644 (file)
index 0000000..67051d1
--- /dev/null
@@ -0,0 +1,2676 @@
+/* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
+ *  Author: Youness Alaoui <youness.alaoui@collabora.co.uk>, Collabora Ltd.
+ *  Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * Gsthlsdemux.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-hlsdemux2
+ * @title: hlsdemux2
+ *
+ * HTTP Live Streaming demuxer element.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 playbin3 uri=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8
+ * ]|
+ *
+ * Since: 1.22
+ */
+
+#ifdef HAVE_CONFIG_H
+#  include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/base/gsttypefindhelper.h>
+#include <gst/tag/tag.h>
+
+#include "gsthlselements.h"
+#include "gsthlsdemux.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-hls"));
+
+GST_DEBUG_CATEGORY (gst_hls_demux2_debug);
+#define GST_CAT_DEFAULT gst_hls_demux2_debug
+
+enum
+{
+  PROP_0,
+
+  PROP_START_BITRATE,
+};
+
+#define DEFAULT_START_BITRATE 0
+
+/* Maximum MPEG-TS PTS/DTS value (33 bits at 90 kHz), expressed in nanoseconds */
+#define MPEG_TS_MAX_PTS (((((guint64)1) << 33) * (guint64)100000) / 9)
+
+/* GObject */
+static void gst_hls_demux_finalize (GObject * obj);
+
+/* GstElement */
+static GstStateChangeReturn
+gst_hls_demux_change_state (GstElement * element, GstStateChange transition);
+
+/* GstHLSDemux */
+static gboolean gst_hls_demux_update_playlist (GstHLSDemux * demux,
+    gboolean update, GError ** err);
+
+/* FIXME: the return value is never used? */
+static gboolean gst_hls_demux_change_playlist (GstHLSDemux * demux,
+    guint max_bitrate, gboolean * changed);
+static GstBuffer *gst_hls_demux_decrypt_fragment (GstHLSDemux * demux,
+    GstHLSDemuxStream * stream, GstBuffer * encrypted_buffer, GError ** err);
+static gboolean
+gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+    const guint8 * key_data, const guint8 * iv_data);
+static void gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream);
+
+static gboolean gst_hls_demux_is_live (GstAdaptiveDemux * demux);
+static GstClockTime gst_hls_demux_get_duration (GstAdaptiveDemux * demux);
+static gint64 gst_hls_demux_get_manifest_update_interval (GstAdaptiveDemux *
+    demux);
+static gboolean gst_hls_demux_process_manifest (GstAdaptiveDemux * demux,
+    GstBuffer * buf);
+static gboolean gst_hls_demux_stream_update_rendition_playlist (GstHLSDemux *
+    demux, GstHLSDemuxStream * stream);
+static GstFlowReturn gst_hls_demux_update_manifest (GstAdaptiveDemux * demux);
+
+static void setup_initial_playlist_and_mapping (GstHLSDemux * demux,
+    GstHLSMediaPlaylist * playlist);
+static GstHLSTimeMap *gst_hls_demux_add_time_mapping (GstHLSDemux * demux,
+    gint64 dsn, GstClockTimeDiff stream_time, GDateTime * pdt);
+
+static gboolean gst_hls_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek);
+static GstFlowReturn gst_hls_demux_stream_seek (GstAdaptiveDemux2Stream *
+    stream, gboolean forward, GstSeekFlags flags, GstClockTimeDiff ts,
+    GstClockTimeDiff * final_ts);
+static gboolean
+gst_hls_demux_start_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static GstFlowReturn gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static GstFlowReturn gst_hls_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer);
+
+static gboolean gst_hls_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream
+    * stream);
+static GstFlowReturn gst_hls_demux_advance_fragment (GstAdaptiveDemux2Stream *
+    stream);
+static GstFlowReturn gst_hls_demux_update_fragment_info (GstAdaptiveDemux2Stream
+    * stream);
+static gboolean gst_hls_demux_stream_can_start (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static void gst_hls_demux_stream_update_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream);
+static gboolean gst_hls_demux_select_bitrate (GstAdaptiveDemux2Stream * stream,
+    guint64 bitrate);
+static void gst_hls_demux_reset (GstAdaptiveDemux * demux);
+static gboolean gst_hls_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+    gint64 * start, gint64 * stop);
+static GstClockTime gst_hls_demux_get_presentation_offset (GstAdaptiveDemux *
+    demux, GstAdaptiveDemux2Stream * stream);
+static void gst_hls_demux_set_current_variant (GstHLSDemux * hlsdemux,
+    GstHLSVariantStream * variant);
+
+static void gst_hls_demux_stream_finalize (GObject * object);
+
+#define gst_hls_demux_stream_parent_class stream_parent_class
+G_DEFINE_TYPE (GstHLSDemuxStream, gst_hls_demux_stream,
+    GST_TYPE_ADAPTIVE_DEMUX2_STREAM);
+
+GST_ELEMENT_REGISTER_DEFINE (hlsdemux2, "hlsdemux2",
+    GST_RANK_PRIMARY + 1, GST_TYPE_HLS_DEMUX2);
+
+static void
+gst_hls_demux_stream_class_init (GstHLSDemuxStreamClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->finalize = gst_hls_demux_stream_finalize;
+}
+
+static void
+gst_hls_demux_stream_init (GstHLSDemuxStream * stream)
+{
+  stream->parser_type = GST_HLS_PARSER_NONE;
+  stream->do_typefind = TRUE;
+  stream->reset_pts = TRUE;
+  stream->presentation_offset = 60 * GST_SECOND;
+}
+
+typedef struct _GstHLSDemux2 GstHLSDemux2;
+typedef struct _GstHLSDemux2Class GstHLSDemux2Class;
+
+#define gst_hls_demux2_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstHLSDemux2, gst_hls_demux2, GST_TYPE_ADAPTIVE_DEMUX,
+    hls_element_init ());
+
+static void
+gst_hls_demux_finalize (GObject * obj)
+{
+  GstHLSDemux *demux = GST_HLS_DEMUX (obj);
+
+  gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+  g_mutex_clear (&demux->keys_lock);
+  if (demux->keys) {
+    g_hash_table_unref (demux->keys);
+    demux->keys = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+static void
+gst_hls_demux_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstHLSDemux *demux = GST_HLS_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_START_BITRATE:
+      demux->start_bitrate = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_hls_demux_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstHLSDemux *demux = GST_HLS_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_START_BITRATE:
+      g_value_set_uint (value, demux->start_bitrate);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+
+static void
+gst_hls_demux2_class_init (GstHLSDemux2Class * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+  GstAdaptiveDemuxClass *adaptivedemux_class;
+
+  gobject_class = (GObjectClass *) klass;
+  element_class = (GstElementClass *) klass;
+  adaptivedemux_class = (GstAdaptiveDemuxClass *) klass;
+
+  gobject_class->set_property = gst_hls_demux_set_property;
+  gobject_class->get_property = gst_hls_demux_get_property;
+  gobject_class->finalize = gst_hls_demux_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_START_BITRATE,
+      g_param_spec_uint ("start-bitrate", "Starting Bitrate",
+          "Initial bitrate to use to choose first alternate (0 = automatic) (bits/s)",
+          0, G_MAXUINT, DEFAULT_START_BITRATE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  element_class->change_state = GST_DEBUG_FUNCPTR (gst_hls_demux_change_state);
+
+  gst_element_class_add_static_pad_template (element_class, &sinktemplate);
+
+  gst_element_class_set_static_metadata (element_class,
+      "HLS Demuxer",
+      "Codec/Demuxer/Adaptive",
+      "HTTP Live Streaming demuxer",
+      "Edward Hervey <edward@centricular.com>\n"
+      "Jan Schmidt <jan@centricular.com>");
+
+  adaptivedemux_class->is_live = gst_hls_demux_is_live;
+  adaptivedemux_class->get_live_seek_range = gst_hls_demux_get_live_seek_range;
+  adaptivedemux_class->get_presentation_offset =
+      gst_hls_demux_get_presentation_offset;
+  adaptivedemux_class->get_duration = gst_hls_demux_get_duration;
+  adaptivedemux_class->get_manifest_update_interval =
+      gst_hls_demux_get_manifest_update_interval;
+  adaptivedemux_class->process_manifest = gst_hls_demux_process_manifest;
+  adaptivedemux_class->update_manifest = gst_hls_demux_update_manifest;
+  adaptivedemux_class->reset = gst_hls_demux_reset;
+  adaptivedemux_class->seek = gst_hls_demux_seek;
+  adaptivedemux_class->stream_seek = gst_hls_demux_stream_seek;
+  adaptivedemux_class->stream_has_next_fragment =
+      gst_hls_demux_stream_has_next_fragment;
+  adaptivedemux_class->stream_advance_fragment = gst_hls_demux_advance_fragment;
+  adaptivedemux_class->stream_update_fragment_info =
+      gst_hls_demux_update_fragment_info;
+  adaptivedemux_class->stream_select_bitrate = gst_hls_demux_select_bitrate;
+  adaptivedemux_class->stream_can_start = gst_hls_demux_stream_can_start;
+  adaptivedemux_class->stream_update_tracks =
+      gst_hls_demux_stream_update_tracks;
+
+  adaptivedemux_class->start_fragment = gst_hls_demux_start_fragment;
+  adaptivedemux_class->finish_fragment = gst_hls_demux_finish_fragment;
+  adaptivedemux_class->data_received = gst_hls_demux_data_received;
+
+  GST_DEBUG_CATEGORY_INIT (gst_hls_demux2_debug, "hlsdemux2", 0,
+      "hlsdemux2 element");
+}
+
+static void
+gst_hls_demux2_init (GstHLSDemux * demux)
+{
+  demux->keys = g_hash_table_new_full (g_str_hash, g_str_equal, g_free, g_free);
+  g_mutex_init (&demux->keys_lock);
+}
+
+static GstStateChangeReturn
+gst_hls_demux_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstHLSDemux *demux = GST_HLS_DEMUX (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_hls_demux_reset (GST_ADAPTIVE_DEMUX_CAST (demux));
+      g_hash_table_remove_all (demux->keys);
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
+
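+/* Returns the last measured download rate of the first stream in the input
+ * period, used as an estimate of the available bandwidth, or 0 if not known
+ * yet. */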
+static guint64
+gst_hls_demux_get_bitrate (GstHLSDemux * hlsdemux)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (hlsdemux);
+
+  /* FIXME !!!
+   *
+   * No, there isn't a single output :D */
+
+  /* Valid because hlsdemux only has a single output */
+  if (demux->input_period->streams) {
+    GstAdaptiveDemux2Stream *stream = demux->input_period->streams->data;
+    return stream->current_download_rate;
+  }
+
+  return 0;
+}
+
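+/* Drop any partially processed fragment data (encrypted, decrypted and
+ * typefind buffers) and stop decryption. Pending segment data is kept if it
+ * is header data, unless @force is TRUE. */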
+static void
+gst_hls_demux_stream_clear_pending_data (GstHLSDemuxStream * hls_stream,
+    gboolean force)
+{
+  GST_DEBUG_OBJECT (hls_stream, "force : %d", force);
+  if (hls_stream->pending_encrypted_data)
+    gst_adapter_clear (hls_stream->pending_encrypted_data);
+  gst_buffer_replace (&hls_stream->pending_decrypted_buffer, NULL);
+  gst_buffer_replace (&hls_stream->pending_typefind_buffer, NULL);
+  if (force || !hls_stream->pending_data_is_header) {
+    gst_buffer_replace (&hls_stream->pending_segment_data, NULL);
+    hls_stream->pending_data_is_header = FALSE;
+  }
+  hls_stream->current_offset = -1;
+  hls_stream->process_buffer_content = TRUE;
+  gst_hls_demux_stream_decrypt_end (hls_stream);
+}
+
+static void
+gst_hls_demux_clear_all_pending_data (GstHLSDemux * hlsdemux)
+{
+  GstAdaptiveDemux *demux = (GstAdaptiveDemux *) hlsdemux;
+  GList *walk;
+
+  if (!demux->input_period)
+    return;
+
+  for (walk = demux->input_period->streams; walk != NULL; walk = walk->next) {
+    GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (walk->data);
+    gst_hls_demux_stream_clear_pending_data (hls_stream, TRUE);
+  }
+}
+
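+/* A seek only updates the playback position if it changes the edge we are
+ * moving towards: the start for forward rates, the stop for reverse rates. */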
+#define SEEK_UPDATES_PLAY_POSITION(r, start_type, stop_type) \
+  ((r >= 0 && start_type != GST_SEEK_TYPE_NONE) || \
+   (r < 0 && stop_type != GST_SEEK_TYPE_NONE))
+
+#define IS_SNAP_SEEK(f) (f & (GST_SEEK_FLAG_SNAP_BEFORE |        \
+                              GST_SEEK_FLAG_SNAP_AFTER |         \
+                              GST_SEEK_FLAG_SNAP_NEAREST |       \
+                             GST_SEEK_FLAG_TRICKMODE_KEY_UNITS | \
+                             GST_SEEK_FLAG_KEY_UNIT))
+
+static gboolean
+gst_hls_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType start_type, stop_type;
+  gint64 start, stop;
+  gdouble rate, old_rate;
+  GList *walk;
+  gint64 current_pos, target_pos, final_pos;
+  guint64 bitrate;
+
+  gst_event_parse_seek (seek, &rate, &format, &flags, &start_type, &start,
+      &stop_type, &stop);
+
+  if (!SEEK_UPDATES_PLAY_POSITION (rate, start_type, stop_type)) {
+    /* nothing to do if we don't have to update the current position */
+    return TRUE;
+  }
+
+  old_rate = demux->segment.rate;
+
+  bitrate = gst_hls_demux_get_bitrate (hlsdemux);
+
+  /* Use I-frame variants for trick modes */
+  if (hlsdemux->master->iframe_variants != NULL
+      && rate < -1.0 && old_rate >= -1.0 && old_rate <= 1.0) {
+    GError *err = NULL;
+
+    /* Switch to I-frame variant */
+    gst_hls_demux_set_current_variant (hlsdemux,
+        hlsdemux->master->iframe_variants->data);
+
+    if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+      GST_ELEMENT_ERROR_FROM_ERROR (hlsdemux, "Could not switch playlist", err);
+      return FALSE;
+    }
+    //hlsdemux->discont = TRUE;
+
+    gst_hls_demux_change_playlist (hlsdemux, bitrate / ABS (rate), NULL);
+  } else if (rate > -1.0 && rate <= 1.0 && (old_rate < -1.0 || old_rate > 1.0)) {
+    GError *err = NULL;
+    /* Switch to normal variant */
+    gst_hls_demux_set_current_variant (hlsdemux,
+        hlsdemux->master->variants->data);
+
+    if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+      GST_ELEMENT_ERROR_FROM_ERROR (hlsdemux, "Could not switch playlist", err);
+      return FALSE;
+    }
+    //hlsdemux->discont = TRUE;
+    /* TODO why not continue using the same? that was being used up to now? */
+    gst_hls_demux_change_playlist (hlsdemux, bitrate, NULL);
+  }
+
+  target_pos = rate < 0 ? stop : start;
+  final_pos = target_pos;
+
+  /* properly cleanup pending decryption status */
+  if (flags & GST_SEEK_FLAG_FLUSH) {
+    gst_hls_demux_clear_all_pending_data (hlsdemux);
+  }
+
+  for (walk = demux->input_period->streams; walk; walk = g_list_next (walk)) {
+    GstAdaptiveDemux2Stream *stream =
+        GST_ADAPTIVE_DEMUX2_STREAM_CAST (walk->data);
+
+    if (gst_hls_demux_stream_seek (stream, rate >= 0, flags, target_pos,
+            &current_pos) != GST_FLOW_OK) {
+      GST_ERROR_OBJECT (stream, "Failed to seek on stream");
+      return FALSE;
+    }
+
+    /* FIXME: use minimum position always ? */
+    if (final_pos > current_pos)
+      final_pos = current_pos;
+  }
+
+  if (IS_SNAP_SEEK (flags)) {
+    if (rate >= 0)
+      gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+          final_pos, stop_type, stop, NULL);
+    else
+      gst_segment_do_seek (&demux->segment, rate, format, flags, start_type,
+          start, stop_type, final_pos, NULL);
+  }
+
+  return TRUE;
+}
+
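+/* Seek an individual stream: make sure its rendition playlist is up to date,
+ * then look up the media segment matching the requested time and make it the
+ * current segment. If provided, *final_ts is set to the stream time of that
+ * segment. */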
+static GstFlowReturn
+gst_hls_demux_stream_seek (GstAdaptiveDemux2Stream * stream, gboolean forward,
+    GstSeekFlags flags, GstClockTimeDiff ts, GstClockTimeDiff * final_ts)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) stream->demux;
+  GstM3U8MediaSegment *new_position;
+
+  GST_DEBUG_OBJECT (stream,
+      "is_variant:%d media:%p current_variant:%p forward:%d ts:%"
+      GST_TIME_FORMAT, hls_stream->is_variant, hls_stream->current_rendition,
+      hlsdemux->current_variant, forward, GST_TIME_ARGS (ts));
+
+  /* If the rendition playlist needs to be updated, do it now */
+  if (!hls_stream->is_variant && !hls_stream->playlist_fetched) {
+    if (!gst_hls_demux_stream_update_rendition_playlist (hlsdemux, hls_stream)) {
+      GST_WARNING_OBJECT (stream,
+          "Failed to update the rendition playlist before seeking");
+      return GST_FLOW_ERROR;
+    }
+  }
+
+  /* We seeked, reset pending_advance */
+  hls_stream->pending_advance = FALSE;
+
+  new_position =
+      gst_hls_media_playlist_seek (hls_stream->playlist, forward, flags, ts);
+  if (new_position) {
+    if (hls_stream->current_segment)
+      gst_m3u8_media_segment_unref (hls_stream->current_segment);
+    hls_stream->current_segment = new_position;
+    hls_stream->reset_pts = TRUE;
+    if (final_ts)
+      *final_ts = new_position->stream_time;
+  } else {
+    GST_WARNING_OBJECT (stream, "Seeking failed");
+    return GST_FLOW_ERROR;
+  }
+
+  return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_hls_demux_update_manifest (GstAdaptiveDemux * demux)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  if (!gst_hls_demux_update_playlist (hlsdemux, TRUE, NULL))
+    return GST_FLOW_ERROR;
+
+  return GST_FLOW_OK;
+}
+
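+/* Create a new GstHLSDemuxStream with the given name and register it with the
+ * adaptive demux base class. */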
+static GstAdaptiveDemux2Stream *
+create_common_hls_stream (GstHLSDemux * demux, const gchar * name)
+{
+  GstAdaptiveDemux2Stream *stream;
+
+  stream = g_object_new (GST_TYPE_HLS_DEMUX_STREAM, "name", name, NULL);
+  gst_adaptive_demux2_add_stream ((GstAdaptiveDemux *) demux, stream);
+
+  return stream;
+}
+
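+/* Create a GstAdaptiveDemuxTrack for an alternate rendition, deriving the
+ * stream-id from the rendition name or language, tagging the language (as a
+ * code if valid, otherwise as a name), and marking text tracks as sparse and
+ * default renditions as initially selected. */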
+static GstAdaptiveDemuxTrack *
+new_track_for_rendition (GstHLSDemux * demux, GstHLSRenditionStream * rendition,
+    GstCaps * caps, GstStreamFlags flags, GstTagList * tags)
+{
+  GstAdaptiveDemuxTrack *track;
+  gchar *stream_id;
+  GstStreamType stream_type = gst_stream_type_from_hls_type (rendition->mtype);
+
+  if (rendition->name)
+    stream_id =
+        g_strdup_printf ("%s-%s", gst_stream_type_get_name (stream_type),
+        rendition->name);
+  else if (rendition->lang)
+    stream_id =
+        g_strdup_printf ("%s-%s", gst_stream_type_get_name (stream_type),
+        rendition->lang);
+  else
+    stream_id = g_strdup (gst_stream_type_get_name (stream_type));
+
+  if (rendition->lang) {
+    if (tags == NULL)
+      tags = gst_tag_list_new_empty ();
+    if (gst_tag_check_language_code (rendition->lang))
+      gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_LANGUAGE_CODE,
+          rendition->lang, NULL);
+    else
+      gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_LANGUAGE_NAME,
+          rendition->lang, NULL);
+  }
+
+  if (stream_type == GST_STREAM_TYPE_TEXT)
+    flags |= GST_STREAM_FLAG_SPARSE;
+
+  if (rendition->is_default)
+    flags |= GST_STREAM_FLAG_SELECT;
+
+  track =
+      gst_adaptive_demux_track_new ((GstAdaptiveDemux *) demux, stream_type,
+      flags, stream_id, caps, tags);
+  g_free (stream_id);
+
+  return track;
+}
+
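+/* Find a rendition of the given stream type that has no URI of its own,
+ * i.e. whose media is carried within the variant stream itself. */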
+static GstHLSRenditionStream *
+find_uriless_rendition (GstHLSDemux * demux, GstStreamType stream_type)
+{
+  GList *tmp;
+
+  for (tmp = demux->master->renditions; tmp; tmp = tmp->next) {
+    GstHLSRenditionStream *media = tmp->data;
+    if (media->uri == NULL &&
+        gst_stream_type_from_hls_type (media->mtype) == stream_type)
+      return media;
+  }
+  return NULL;
+}
+
+static GstCaps *
+get_caps_of_stream_type (GstCaps * full_caps, GstStreamType streamtype)
+{
+  GstCaps *ret = NULL;
+
+  guint i;
+  for (i = 0; i < gst_caps_get_size (full_caps); i++) {
+    GstStructure *st = gst_caps_get_structure (full_caps, i);
+
+    if (gst_hls_get_stream_type_from_structure (st) == streamtype) {
+      ret = gst_caps_new_empty ();
+      gst_caps_append_structure (ret, gst_structure_copy (st));
+      break;
+    }
+  }
+
+  return ret;
+}
+
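+/* Create the exposed tracks for the variant stream once its actual content
+ * (stream collection) is known, combining the manifest codec information with
+ * any uriless renditions the variant stream carries. */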
+static void
+gst_hls_demux_stream_update_tracks (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) demux;
+  GstHLSDemuxStream *hlsdemux_stream = (GstHLSDemuxStream *) stream;
+  guint i;
+  GstStreamType uriless_types = 0;
+  GstCaps *variant_caps = NULL;
+
+  GST_DEBUG_OBJECT (demux, "Update tracks of variant stream");
+
+  if (hlsdemux->master->have_codecs) {
+    variant_caps = gst_hls_master_playlist_get_common_caps (hlsdemux->master);
+  }
+
+  /* Use the stream->stream_collection and manifest to create the appropriate tracks */
+  for (i = 0; i < gst_stream_collection_get_size (stream->stream_collection);
+      i++) {
+    GstStream *gst_stream =
+        gst_stream_collection_get_stream (stream->stream_collection, i);
+    GstStreamType stream_type = gst_stream_get_stream_type (gst_stream);
+    GstAdaptiveDemuxTrack *track;
+    GstHLSRenditionStream *embedded_media = NULL;
+    /* Tracks from the variant streams should be preferred over those provided by renditions */
+    GstStreamFlags flags =
+        gst_stream_get_stream_flags (gst_stream) | GST_STREAM_FLAG_SELECT;
+    GstCaps *manifest_caps = NULL;
+
+    if (stream_type == GST_STREAM_TYPE_UNKNOWN)
+      continue;
+
+    if (variant_caps)
+      manifest_caps = get_caps_of_stream_type (variant_caps, stream_type);
+    hlsdemux_stream->rendition_type |= stream_type;
+
+    if ((uriless_types & stream_type) == 0) {
+      /* Do we have a uriless media for this stream type */
+      /* Find if there is a rendition without URI, it will be provided by this variant */
+      embedded_media = find_uriless_rendition (hlsdemux, stream_type);
+      /* Remember we used this type for an embedded media */
+      uriless_types |= stream_type;
+    }
+
+    if (embedded_media) {
+      GstTagList *tags = gst_stream_get_tags (gst_stream);
+      GST_DEBUG_OBJECT (demux, "Adding track '%s' to main variant stream",
+          embedded_media->name);
+      track =
+          new_track_for_rendition (hlsdemux, embedded_media, manifest_caps,
+          flags, tags ? gst_tag_list_make_writable (tags) : tags);
+    } else {
+      gchar *stream_id;
+      stream_id =
+          g_strdup_printf ("main-%s-%d", gst_stream_type_get_name (stream_type),
+          i);
+
+      GST_DEBUG_OBJECT (demux, "Adding track '%s' to main variant stream",
+          stream_id);
+      track =
+          gst_adaptive_demux_track_new (demux, stream_type,
+          flags, stream_id, manifest_caps, NULL);
+      g_free (stream_id);
+    }
+    track->upstream_stream_id =
+        g_strdup (gst_stream_get_stream_id (gst_stream));
+    gst_adaptive_demux2_stream_add_track (stream, track);
+    gst_adaptive_demux_track_unref (track);
+  }
+
+  /* Update the stream object with rendition types.
+   * FIXME: rendition_type could be removed */
+  stream->stream_type = hlsdemux_stream->rendition_type;
+}
+
+static void
+create_main_variant_stream (GstHLSDemux * demux)
+{
+  GstAdaptiveDemux2Stream *stream;
+  GstHLSDemuxStream *hlsdemux_stream;
+
+  GST_DEBUG_OBJECT (demux, "Creating main variant stream");
+
+  stream = create_common_hls_stream (demux, "hlsstream-variant");
+  demux->main_stream = hlsdemux_stream = (GstHLSDemuxStream *) stream;
+  hlsdemux_stream->is_variant = TRUE;
+  hlsdemux_stream->playlist_fetched = TRUE;
+  /* Due to HLS manifest information being so unreliable/inconsistent, we will
+   * create the actual tracks once we have information about the streams present
+   * in the variant data stream */
+  stream->pending_tracks = TRUE;
+}
+
+static GstHLSDemuxStream *
+create_rendition_stream (GstHLSDemux * demux, GstHLSRenditionStream * media)
+{
+  GstAdaptiveDemux2Stream *stream;
+  GstAdaptiveDemuxTrack *track;
+  GstHLSDemuxStream *hlsdemux_stream;
+  gchar *stream_name;
+
+  GST_DEBUG_OBJECT (demux,
+      "Creating stream for media %s lang:%s (%" GST_PTR_FORMAT ")", media->name,
+      media->lang, media->caps);
+
+  /* We can't reliably provide caps for HLS target tracks since they might
+   * change at any point in time */
+  track = new_track_for_rendition (demux, media, NULL, 0, NULL);
+
+  stream_name = g_strdup_printf ("hlsstream-%s", track->stream_id);
+  stream = create_common_hls_stream (demux, stream_name);
+  g_free (stream_name);
+  hlsdemux_stream = (GstHLSDemuxStream *) stream;
+
+  hlsdemux_stream->is_variant = FALSE;
+  hlsdemux_stream->playlist_fetched = FALSE;
+  stream->stream_type = hlsdemux_stream->rendition_type =
+      gst_stream_type_from_hls_type (media->mtype);
+  if (media->lang)
+    hlsdemux_stream->lang = g_strdup (media->lang);
+  if (media->name)
+    hlsdemux_stream->name = g_strdup (media->name);
+
+  gst_adaptive_demux2_stream_add_track (stream, track);
+  gst_adaptive_demux_track_unref (track);
+
+  return hlsdemux_stream;
+}
+
+static GstHLSDemuxStream *
+existing_rendition_stream (GList * streams, GstHLSRenditionStream * media)
+{
+  GList *tmp;
+  GstStreamType stream_type = gst_stream_type_from_hls_type (media->mtype);
+
+  for (tmp = streams; tmp; tmp = tmp->next) {
+    GstHLSDemuxStream *demux_stream = tmp->data;
+
+    if (demux_stream->is_variant)
+      continue;
+
+    if (demux_stream->rendition_type == stream_type) {
+      if (!g_strcmp0 (demux_stream->name, media->name))
+        return demux_stream;
+      if (media->lang && !g_strcmp0 (demux_stream->lang, media->lang))
+        return demux_stream;
+    }
+  }
+
+  return NULL;
+}
+
+static gboolean
+gst_hls_demux_setup_streams (GstAdaptiveDemux * demux)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstHLSVariantStream *playlist = hlsdemux->current_variant;
+  GList *tmp;
+  GList *streams = NULL;
+
+  if (playlist == NULL) {
+    GST_WARNING_OBJECT (demux, "Can't configure streams - no variant selected");
+    return FALSE;
+  }
+
+  GST_DEBUG_OBJECT (demux, "Setting up streams");
+
+  /* If there are alternate renditions, we will produce a GstAdaptiveDemux2Stream
+   * and GstAdaptiveDemuxTrack for each combination of GstStreamType and other
+   * unique identifier (for now just language)
+   *
+   * Which actual GstHLSMedia to use for each stream will be determined based on
+   * the `group-id` (if present and more than one) selected on the main variant
+   * stream */
+  for (tmp = hlsdemux->master->renditions; tmp; tmp = tmp->next) {
+    GstHLSRenditionStream *media = tmp->data;
+    GstHLSDemuxStream *media_stream, *previous_media_stream;
+
+    GST_LOG_OBJECT (demux, "Rendition %s name:'%s' lang:'%s' uri:%s",
+        gst_stream_type_get_name (gst_stream_type_from_hls_type (media->mtype)),
+        media->name, media->lang, media->uri);
+
+    if (media->uri == NULL) {
+      GST_DEBUG_OBJECT (demux,
+          "Skipping media '%s' , it's provided by the variant stream",
+          media->name);
+      continue;
+    }
+
+    media_stream = previous_media_stream =
+        existing_rendition_stream (streams, media);
+
+    if (!media_stream) {
+      media_stream = create_rendition_stream (hlsdemux, tmp->data);
+    } else
+      GST_DEBUG_OBJECT (demux, "Re-using existing GstHLSDemuxStream %s %s",
+          media_stream->name, media_stream->lang);
+
+    /* Is this rendition active in the current variant ? */
+    if (!g_strcmp0 (playlist->media_groups[media->mtype], media->group_id)) {
+      GST_DEBUG_OBJECT (demux, "Enabling rendition");
+      media_stream->current_rendition = gst_hls_rendition_stream_ref (media);
+    }
+
+    if (!previous_media_stream)
+      streams = g_list_append (streams, media_stream);
+  }
+
+
+  create_main_variant_stream (hlsdemux);
+
+  return TRUE;
+}
+
+static const gchar *
+gst_adaptive_demux_get_manifest_ref_uri (GstAdaptiveDemux * d)
+{
+  return d->manifest_base_uri ? d->manifest_base_uri : d->manifest_uri;
+}
+
+static void
+gst_hls_demux_set_current_variant (GstHLSDemux * hlsdemux,
+    GstHLSVariantStream * variant)
+{
+  if (hlsdemux->current_variant == variant || variant == NULL)
+    return;
+
+  if (hlsdemux->current_variant != NULL) {
+    GST_DEBUG_OBJECT (hlsdemux, "Will switch from variant '%s' to '%s'",
+        hlsdemux->current_variant->name, variant->name);
+    if (hlsdemux->pending_variant) {
+      GST_ERROR_OBJECT (hlsdemux, "Already waiting for pending variant '%s'",
+          hlsdemux->pending_variant->name);
+      gst_hls_variant_stream_unref (hlsdemux->pending_variant);
+    }
+    hlsdemux->pending_variant = gst_hls_variant_stream_ref (variant);
+  } else {
+    GST_DEBUG_OBJECT (hlsdemux, "Setting variant '%s'", variant->name);
+    hlsdemux->current_variant = gst_hls_variant_stream_ref (variant);
+  }
+}
+
+static gboolean
+gst_hls_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
+{
+  GstHLSVariantStream *variant;
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  gchar *playlist = NULL;
+  gboolean ret;
+  GstHLSMediaPlaylist *simple_media_playlist = NULL;
+
+  GST_INFO_OBJECT (demux, "Initial playlist location: %s (base uri: %s)",
+      demux->manifest_uri, demux->manifest_base_uri);
+
+  playlist = gst_hls_buf_to_utf8_text (buf);
+  if (playlist == NULL) {
+    GST_WARNING_OBJECT (demux, "Error validating initial playlist");
+    return FALSE;
+  }
+
+  hlsdemux->master = gst_hls_master_playlist_new_from_data (playlist,
+      gst_adaptive_demux_get_manifest_ref_uri (demux));
+
+  if (hlsdemux->master == NULL) {
+    /* In most cases, this will happen if we set a wrong url in the
+     * source element and we have received the 404 HTML response instead of
+     * the playlist */
+    GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."),
+        ("Could not parse playlist. Check if the URL is correct."));
+    return FALSE;
+  }
+
+  if (hlsdemux->master->is_simple) {
+    simple_media_playlist =
+        gst_hls_media_playlist_parse (playlist,
+        gst_adaptive_demux_get_manifest_ref_uri (demux), NULL);
+  }
+
+  /* select the initial variant stream */
+  if (demux->connection_speed == 0) {
+    variant = hlsdemux->master->default_variant;
+  } else if (hlsdemux->start_bitrate > 0) {
+    variant =
+        gst_hls_master_playlist_get_variant_for_bitrate (hlsdemux->master,
+        NULL, hlsdemux->start_bitrate, demux->min_bitrate);
+  } else {
+    variant =
+        gst_hls_master_playlist_get_variant_for_bitrate (hlsdemux->master,
+        NULL, demux->connection_speed, demux->min_bitrate);
+  }
+
+  if (variant) {
+    GST_INFO_OBJECT (hlsdemux,
+        "Manifest processed, initial variant selected : `%s`", variant->name);
+    gst_hls_demux_set_current_variant (hlsdemux, variant);      // FIXME: inline?
+  }
+
+  GST_DEBUG_OBJECT (hlsdemux, "Manifest handled, now setting up streams");
+
+  ret = gst_hls_demux_setup_streams (demux);
+
+  if (simple_media_playlist) {
+    hlsdemux->main_stream->playlist = simple_media_playlist;
+    hlsdemux->main_stream->current_segment =
+        gst_hls_media_playlist_get_starting_segment (simple_media_playlist);
+    setup_initial_playlist_and_mapping (hlsdemux, simple_media_playlist);
+    gst_hls_media_playlist_dump (simple_media_playlist);
+  }
+
+  /* get the selected media playlist (unless the initial list was one already) */
+  if (!hlsdemux->master->is_simple) {
+    GError *err = NULL;
+
+    if (!gst_hls_demux_update_playlist (hlsdemux, FALSE, &err)) {
+      GST_ELEMENT_ERROR_FROM_ERROR (demux, "Could not fetch media playlist",
+          err);
+      return FALSE;
+    }
+  }
+
+  return ret;
+}
+
+static GstClockTime
+gst_hls_demux_get_duration (GstAdaptiveDemux * demux)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+  if (hlsdemux->main_stream)
+    duration =
+        gst_hls_media_playlist_get_duration (hlsdemux->main_stream->playlist);
+
+  return duration;
+}
+
+static gboolean
+gst_hls_demux_is_live (GstAdaptiveDemux * demux)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  gboolean is_live = FALSE;
+
+  if (hlsdemux->main_stream)
+    is_live = gst_hls_media_playlist_is_live (hlsdemux->main_stream->playlist);
+
+  return is_live;
+}
+
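+/* Look up (or download and cache) the 16-byte decryption key located at
+ * @key_url. Keys are cached in demux->keys, protected by keys_lock. */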
+static const GstHLSKey *
+gst_hls_demux_get_key (GstHLSDemux * demux, const gchar * key_url,
+    const gchar * referer, gboolean allow_cache)
+{
+  GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
+  DownloadRequest *key_request;
+  DownloadFlags dl_flags = DOWNLOAD_FLAG_NONE;
+  GstBuffer *key_buffer;
+  GstHLSKey *key;
+  GError *err = NULL;
+
+  GST_LOG_OBJECT (demux, "Looking up key for key url %s", key_url);
+
+  g_mutex_lock (&demux->keys_lock);
+
+  key = g_hash_table_lookup (demux->keys, key_url);
+
+  if (key != NULL) {
+    GST_LOG_OBJECT (demux, "Found key for key url %s in key cache", key_url);
+    goto out;
+  }
+
+  GST_INFO_OBJECT (demux, "Fetching key %s", key_url);
+
+  if (!allow_cache)
+    dl_flags |= DOWNLOAD_FLAG_FORCE_REFRESH;
+
+  key_request =
+      downloadhelper_fetch_uri (adaptive_demux->download_helper,
+      key_url, referer, dl_flags, &err);
+  if (key_request == NULL) {
+    GST_WARNING_OBJECT (demux, "Failed to download key to decrypt data: %s",
+        err ? err->message : "error");
+    g_clear_error (&err);
+    goto out;
+  }
+
+  key_buffer = download_request_take_buffer (key_request);
+  download_request_unref (key_request);
+
+  key = g_new0 (GstHLSKey, 1);
+  if (gst_buffer_extract (key_buffer, 0, key->data, 16) < 16)
+    GST_WARNING_OBJECT (demux, "Download decryption key is too short!");
+
+  g_hash_table_insert (demux->keys, g_strdup (key_url), key);
+
+  gst_buffer_unref (key_buffer);
+
+out:
+
+  g_mutex_unlock (&demux->keys_lock);
+
+  if (key != NULL)
+    GST_MEMDUMP_OBJECT (demux, "Key", key->data, 16);
+
+  return key;
+}
+
+static gboolean
+gst_hls_demux_start_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  const GstHLSKey *key;
+  GstHLSMediaPlaylist *m3u8;
+
+  GST_DEBUG_OBJECT (stream, "Fragment starting");
+
+  gst_hls_demux_stream_clear_pending_data (hls_stream, FALSE);
+
+  /* If no decryption is needed, there's nothing to be done here */
+  if (hls_stream->current_key == NULL)
+    return TRUE;
+
+  m3u8 = hls_stream->playlist;
+
+  key = gst_hls_demux_get_key (hlsdemux, hls_stream->current_key,
+      m3u8->uri, m3u8->allowcache);
+
+  if (key == NULL)
+    goto key_failed;
+
+  if (!gst_hls_demux_stream_decrypt_start (hls_stream, key->data,
+          hls_stream->current_iv))
+    goto decrypt_start_failed;
+
+  return TRUE;
+
+key_failed:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DECRYPT_NOKEY,
+        ("Couldn't retrieve key for decryption"), (NULL));
+    GST_WARNING_OBJECT (demux, "Failed to decrypt data");
+    return FALSE;
+  }
+decrypt_start_failed:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DECRYPT, ("Failed to start decrypt"),
+        ("Couldn't set key and IV or plugin was built without crypto library"));
+    return FALSE;
+  }
+}
+
+static void
+gst_hls_demux_start_rendition_streams (GstHLSDemux * hlsdemux)
+{
+  GstAdaptiveDemux *demux = (GstAdaptiveDemux *) hlsdemux;
+  GList *tmp;
+
+  for (tmp = demux->input_period->streams; tmp; tmp = tmp->next) {
+    GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) tmp->data;
+    GstHLSDemuxStream *hls_stream = (GstHLSDemuxStream *) stream;
+
+    if (!hls_stream->is_variant
+        && gst_adaptive_demux2_stream_is_selected (stream))
+      gst_adaptive_demux2_stream_start (stream);
+  }
+}
+
+static GstHLSParserType
+caps_to_parser_type (const GstCaps * caps)
+{
+  const GstStructure *s = gst_caps_get_structure (caps, 0);
+
+  if (gst_structure_has_name (s, "video/mpegts"))
+    return GST_HLS_PARSER_MPEGTS;
+  if (gst_structure_has_name (s, "application/x-id3"))
+    return GST_HLS_PARSER_ID3;
+  if (gst_structure_has_name (s, "application/x-subtitle-vtt"))
+    return GST_HLS_PARSER_WEBVTT;
+  if (gst_structure_has_name (s, "video/quicktime"))
+    return GST_HLS_PARSER_ISOBMFF;
+
+  return GST_HLS_PARSER_NONE;
+}
+
+/* Identify the nature of data for this stream
+ *
+ * Will also set up the appropriate parser (tsreader) if needed
+ *
+ * Returns TRUE if we are done with typefinding */
+static gboolean
+gst_hls_demux_typefind_stream (GstHLSDemux * hlsdemux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer, gboolean at_eos,
+    GstFlowReturn * ret)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);   // FIXME: pass HlsStream into function
+  GstCaps *caps = NULL;
+  guint buffer_size;
+  GstTypeFindProbability prob = GST_TYPE_FIND_NONE;
+  GstMapInfo info;
+
+  if (hls_stream->pending_typefind_buffer)
+    buffer = gst_buffer_append (hls_stream->pending_typefind_buffer, buffer);
+  hls_stream->pending_typefind_buffer = NULL;
+
+  gst_buffer_map (buffer, &info, GST_MAP_READ);
+  buffer_size = info.size;
+
+  /* Typefind could miss if buffer is too small. In this case we
+   * will retry later */
+  if (buffer_size >= (2 * 1024) || at_eos) {
+    caps =
+        gst_type_find_helper_for_data (GST_OBJECT_CAST (hlsdemux), info.data,
+        info.size, &prob);
+  }
+
+  if (G_UNLIKELY (!caps)) {
+    /* Won't need this mapping any more, all paths return inside this if() */
+    gst_buffer_unmap (buffer, &info);
+
+    /* Only fail typefinding if we already have a good amount of data (2 MiB)
+     * and we still don't know the type */
+    if (buffer_size > (2 * 1024 * 1024) || at_eos) {
+      GST_ELEMENT_ERROR (hlsdemux, STREAM, TYPE_NOT_FOUND,
+          ("Could not determine type of stream"), (NULL));
+      gst_buffer_unref (buffer);
+      *ret = GST_FLOW_NOT_NEGOTIATED;
+    } else {
+      GST_LOG_OBJECT (stream, "Not enough data to typefind");
+      hls_stream->pending_typefind_buffer = buffer;
+      *ret = GST_FLOW_OK;
+    }
+    return FALSE;
+  }
+
+  GST_DEBUG_OBJECT (stream,
+      "Typefind result: %" GST_PTR_FORMAT " prob:%d", caps, prob);
+
+  if (hls_stream->parser_type == GST_HLS_PARSER_NONE) {
+    hls_stream->parser_type = caps_to_parser_type (caps);
+    if (hls_stream->parser_type == GST_HLS_PARSER_NONE) {
+      GST_WARNING_OBJECT (stream,
+          "Unsupported stream type %" GST_PTR_FORMAT, caps);
+      GST_MEMDUMP_OBJECT (stream, "unknown data", info.data,
+          MIN (info.size, 128));
+      *ret = GST_FLOW_ERROR;
+      return FALSE;
+    }
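+    /* For ISOBMFF the actual presentation offset is filled in later, from the
+     * first internal time parsed out of the stream (see
+     * gst_hlsdemux_handle_internal_time()) */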
+    if (hls_stream->parser_type == GST_HLS_PARSER_ISOBMFF)
+      hls_stream->presentation_offset = 0;
+  }
+
+  gst_adaptive_demux2_stream_set_caps (stream, caps);
+
+  hls_stream->do_typefind = FALSE;
+
+  gst_buffer_unmap (buffer, &info);
+
+  /* We are done with typefinding */
+  *ret = GST_FLOW_OK;
+  return TRUE;
+}
+
+GstHLSTimeMap *
+gst_hls_find_time_map (GstHLSDemux * demux, gint64 dsn)
+{
+  GList *tmp;
+
+  GST_LOG_OBJECT (demux, "dsn:%" G_GINT64_FORMAT, dsn);
+
+  for (tmp = demux->mappings; tmp; tmp = tmp->next) {
+    GstHLSTimeMap *map = tmp->data;
+    if (map->dsn == dsn)
+      return map;
+  }
+
+  return NULL;
+}
+
+/* Compute the stream time for the given internal time, based on the provided
+ * time map.
+ *
+ * Will handle mpeg-ts wraparound. */
+GstClockTimeDiff
+gst_hls_internal_to_stream_time (GstHLSTimeMap * map,
+    GstClockTime internal_time)
+{
+  if (map->internal_time == GST_CLOCK_TIME_NONE)
+    return GST_CLOCK_STIME_NONE;
+
+  /* Handle MPEG-TS Wraparound */
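+  /* A backwards jump of more than half the PTS range means the PTS counter
+   * wrapped around */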
+  if (internal_time < map->internal_time &&
+      map->internal_time - internal_time > (MPEG_TS_MAX_PTS / 2))
+    internal_time += MPEG_TS_MAX_PTS;
+
+  return (map->stream_time + internal_time - map->internal_time);
+}
+
+/* Handle the internal time discovered on a segment.
+ *
+ * This function is called by the individual buffer parsers once they have
+ * extracted that internal time (which is most of the time based on mpegts time,
+ * but can also be ISOBMFF pts).
+ *
+ * This will update the time map when appropriate.
+ *
+ * If a synchronization issue is detected, the appropriate steps will be taken
+ * and the RESYNC return value will be returned
+ */
+GstHLSParserResult
+gst_hlsdemux_handle_internal_time (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, GstClockTime internal_time)
+{
+  GstM3U8MediaSegment *current_segment = hls_stream->current_segment;
+  GstHLSTimeMap *map;
+  GstClockTimeDiff current_stream_time;
+  GstClockTimeDiff real_stream_time, difference;
+
+  g_return_val_if_fail (current_segment != NULL, GST_HLS_PARSER_RESULT_ERROR);
+
+  current_stream_time = current_segment->stream_time;
+
+  GST_DEBUG_OBJECT (hls_stream,
+      "Got internal time %" GST_TIME_FORMAT " for current segment stream time %"
+      GST_STIME_FORMAT, GST_TIME_ARGS (internal_time),
+      GST_STIME_ARGS (current_stream_time));
+
+  map = gst_hls_find_time_map (demux, current_segment->discont_sequence);
+
+  /* Time mappings will always be created upon initial parsing and when advancing */
+  g_assert (map);
+
+  /* Handle the first internal time of a discont sequence. We can only store/use
+   * those values for variant streams. */
+  if (!GST_CLOCK_TIME_IS_VALID (map->internal_time)) {
+    if (!hls_stream->is_variant) {
+      GST_WARNING_OBJECT (hls_stream,
+          "Got data from a new discont sequence on a rendition stream, can't validate stream time");
+      return GST_HLS_PARSER_RESULT_DONE;
+    }
+    GST_DEBUG_OBJECT (hls_stream,
+        "Updating time map dsn:%" G_GINT64_FORMAT " stream_time:%"
+        GST_STIME_FORMAT " internal_time:%" GST_TIME_FORMAT, map->dsn,
+        GST_STIME_ARGS (current_stream_time), GST_TIME_ARGS (internal_time));
+    /* The stream time for a mapping should always be positive ! */
+    g_assert (current_stream_time >= 0);
+
+    if (hls_stream->parser_type == GST_HLS_PARSER_ISOBMFF)
+      hls_stream->presentation_offset = internal_time;
+
+    map->stream_time = current_stream_time;
+    map->internal_time = internal_time;
+
+    gst_hls_demux_start_rendition_streams (demux);
+    return GST_HLS_PARSER_RESULT_DONE;
+  }
+
+  /* The information in a discont is always valid */
+  if (current_segment->discont) {
+    GST_DEBUG_OBJECT (hls_stream,
+        "DISCONT segment, Updating time map to stream_time:%" GST_STIME_FORMAT
+        " internal_time:%" GST_TIME_FORMAT, GST_STIME_ARGS (internal_time),
+        GST_TIME_ARGS (current_stream_time));
+    map->stream_time = current_stream_time;
+    map->internal_time = internal_time;
+    return GST_HLS_PARSER_RESULT_DONE;
+  }
+
+  /* Check if the segment is the expected one */
+  real_stream_time = gst_hls_internal_to_stream_time (map, internal_time);
+  difference = current_stream_time - real_stream_time;
+  GST_DEBUG_OBJECT (hls_stream,
+      "Segment contains stream time %" GST_STIME_FORMAT
+      " difference against expected : %" GST_STIME_FORMAT,
+      GST_STIME_ARGS (real_stream_time), GST_STIME_ARGS (difference));
+
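+  /* Tolerate a small (10ms) deviation before correcting the playlist stream
+   * times */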
+  if (ABS (difference) > 10 * GST_MSECOND) {
+    /* Update the value */
+    GST_DEBUG_OBJECT (hls_stream,
+        "Updating current stream time to %" GST_STIME_FORMAT,
+        GST_STIME_ARGS (real_stream_time));
+    current_segment->stream_time = real_stream_time;
+
+    gst_hls_media_playlist_recalculate_stream_time (hls_stream->playlist,
+        hls_stream->current_segment);
+    gst_hls_media_playlist_dump (hls_stream->playlist);
+
+    if (ABS (difference) > (hls_stream->current_segment->duration / 2)) {
+      GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) hls_stream;
+      GstM3U8MediaSegment *actual_segment;
+
+      /* We are at the wrong segment, try to figure out the *actual* segment */
+      GST_DEBUG_OBJECT (hls_stream,
+          "Trying to seek to the correct segment for %" GST_STIME_FORMAT,
+          GST_STIME_ARGS (current_stream_time));
+      actual_segment =
+          gst_hls_media_playlist_seek (hls_stream->playlist, TRUE,
+          GST_SEEK_FLAG_SNAP_NEAREST, current_stream_time);
+
+      if (actual_segment) {
+        GST_DEBUG_OBJECT (hls_stream, "Synced to position %" GST_STIME_FORMAT,
+            GST_STIME_ARGS (actual_segment->stream_time));
+        gst_m3u8_media_segment_unref (hls_stream->current_segment);
+        hls_stream->current_segment = actual_segment;
+        /* Ask parent class to restart this fragment */
+        return GST_HLS_PARSER_RESULT_RESYNC;
+      }
+
+      GST_WARNING_OBJECT (hls_stream,
+          "Could not find a replacement stream, carrying on with segment");
+      stream->discont = TRUE;
+      stream->fragment.stream_time = real_stream_time;
+    }
+  }
+
+  return GST_HLS_PARSER_RESULT_DONE;
+}
+
+static GstHLSParserResult
+gst_hls_demux_handle_buffer_content (GstHLSDemux * demux,
+    GstHLSDemuxStream * hls_stream, gboolean draining, GstBuffer ** buffer)
+{
+  GstHLSTimeMap *map;
+  GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) hls_stream;
+  GstClockTimeDiff current_stream_time =
+      hls_stream->current_segment->stream_time;
+  GstClockTime current_duration = hls_stream->current_segment->duration;
+  GstHLSParserResult parser_ret;
+
+  GST_LOG_OBJECT (stream,
+      "stream_time:%" GST_STIME_FORMAT " duration:%" GST_TIME_FORMAT
+      " discont:%d draining:%d header:%d index:%d",
+      GST_STIME_ARGS (current_stream_time), GST_TIME_ARGS (current_duration),
+      hls_stream->current_segment->discont, draining,
+      stream->downloading_header, stream->downloading_index);
+
+  /* FIXME : Replace the boolean parser return value (and this function's return
+   *  value) by an enum which clearly specifies whether:
+   *
+   * * The content parsing happened successfully and it no longer needs to be
+   *   called for the remainder of this fragment
+   * * More data is needed in order to parse the data
+   * * There was a fatal error parsing the contents (ex: invalid/incompatible
+   *   content)
+   * * The computed fragment stream time is out of sync
+   */
+
+  g_assert (demux->mappings);
+  map =
+      gst_hls_find_time_map (demux,
+      hls_stream->current_segment->discont_sequence);
+  if (!map) {
+    /* For rendition streams, we can't do anything without time mapping */
+    if (!hls_stream->is_variant) {
+      GST_DEBUG_OBJECT (stream,
+          "No available time mapping for dsn:%" G_GINT64_FORMAT
+          " using estimated stream time",
+          hls_stream->current_segment->discont_sequence);
+      goto out_done;
+    }
+
+    /* Variants will be able to fill in the time mapping, so we can carry on without one */
+  } else {
+    GST_DEBUG_OBJECT (stream,
+        "Using mapping dsn:%" G_GINT64_FORMAT " stream_time:%" GST_TIME_FORMAT
+        " internal_time:%" GST_TIME_FORMAT, map->dsn,
+        GST_TIME_ARGS (map->stream_time), GST_TIME_ARGS (map->internal_time));
+  }
+
+  switch (hls_stream->parser_type) {
+    case GST_HLS_PARSER_MPEGTS:
+      parser_ret =
+          gst_hlsdemux_handle_content_mpegts (demux, hls_stream, draining,
+          buffer);
+      break;
+    case GST_HLS_PARSER_ID3:
+      parser_ret =
+          gst_hlsdemux_handle_content_id3 (demux, hls_stream, draining, buffer);
+      break;
+    case GST_HLS_PARSER_WEBVTT:
+    {
+      /* The WebVTT parser also handles timeshifting itself */
+      parser_ret =
+          gst_hlsdemux_handle_content_webvtt (demux, hls_stream, draining,
+          buffer);
+      break;
+    }
+    case GST_HLS_PARSER_ISOBMFF:
+      parser_ret =
+          gst_hlsdemux_handle_content_isobmff (demux, hls_stream, draining,
+          buffer);
+      break;
+    case GST_HLS_PARSER_NONE:
+    default:
+    {
+      GST_ERROR_OBJECT (stream, "Unknown stream type");
+      goto out_error;
+    }
+  }
+
+  if (parser_ret == GST_HLS_PARSER_RESULT_NEED_MORE_DATA) {
+    if (stream->downloading_index || stream->downloading_header)
+      goto out_need_more;
+    /* Else if we're draining, it's an error */
+    if (draining)
+      goto out_error;
+    /* Else we just need more data */
+    goto out_need_more;
+  }
+
+  if (parser_ret == GST_HLS_PARSER_RESULT_ERROR)
+    goto out_error;
+
+  if (parser_ret == GST_HLS_PARSER_RESULT_RESYNC)
+    goto out_resync;
+
+out_done:
+  GST_DEBUG_OBJECT (stream, "Done. Finished parsing");
+  return GST_HLS_PARSER_RESULT_DONE;
+
+out_error:
+  GST_DEBUG_OBJECT (stream, "Done. Error while parsing");
+  return GST_HLS_PARSER_RESULT_ERROR;
+
+out_need_more:
+  GST_DEBUG_OBJECT (stream, "Done. Need more data");
+  return GST_HLS_PARSER_RESULT_NEED_MORE_DATA;
+
+out_resync:
+  GST_DEBUG_OBJECT (stream, "Done. Resync required");
+  return GST_HLS_PARSER_RESULT_RESYNC;
+}
+
+static GstFlowReturn
+gst_hls_demux_handle_buffer (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer, gboolean at_eos)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);   // FIXME: pass HlsStream into function
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  g_assert (hls_stream->current_segment);
+  GST_DEBUG_OBJECT (stream,
+      "buffer:%p at_eos:%d do_typefind:%d uri:%s", buffer, at_eos,
+      hls_stream->do_typefind, hls_stream->current_segment->uri);
+
+  if (buffer == NULL)
+    goto out;
+
+  /* If we need to do typefind and we're not done with it (or we errored), return */
+  if (G_UNLIKELY (hls_stream->do_typefind) &&
+      !gst_hls_demux_typefind_stream (hlsdemux, stream, buffer, at_eos, &ret))
+    goto out;
+  g_assert (hls_stream->pending_typefind_buffer == NULL);
+
+  if (hls_stream->process_buffer_content) {
+    GstHLSParserResult parse_ret;
+
+    if (hls_stream->pending_segment_data) {
+      buffer = gst_buffer_append (hls_stream->pending_segment_data, buffer);
+      hls_stream->pending_segment_data = NULL;
+    }
+
+    /* Try to get the timing information */
+    parse_ret =
+        gst_hls_demux_handle_buffer_content (hlsdemux, hls_stream, at_eos,
+        &buffer);
+
+    switch (parse_ret) {
+      case GST_HLS_PARSER_RESULT_NEED_MORE_DATA:
+        /* If we don't have enough, store and return */
+        hls_stream->pending_segment_data = buffer;
+        hls_stream->pending_data_is_header =
+            (stream->downloading_header == TRUE);
+        if (hls_stream->pending_data_is_header)
+          stream->send_segment = TRUE;
+        goto out;
+      case GST_HLS_PARSER_RESULT_ERROR:
+        /* Error, drop buffer and return */
+        gst_buffer_unref (buffer);
+        ret = GST_FLOW_ERROR;
+        goto out;
+      case GST_HLS_PARSER_RESULT_RESYNC:
+        /* Resync, drop buffer and return */
+        gst_buffer_unref (buffer);
+        ret = GST_ADAPTIVE_DEMUX_FLOW_RESTART_FRAGMENT;
+        goto out;
+      case GST_HLS_PARSER_RESULT_DONE:
+        /* Done parsing, carry on */
+        hls_stream->process_buffer_content = FALSE;
+        break;
+    }
+  }
+
+  if (!buffer)
+    goto out;
+
+  buffer = gst_buffer_make_writable (buffer);
+
+  GST_BUFFER_OFFSET (buffer) = hls_stream->current_offset;
+  hls_stream->current_offset += gst_buffer_get_size (buffer);
+  GST_BUFFER_OFFSET_END (buffer) = hls_stream->current_offset;
+
+  GST_DEBUG_OBJECT (stream, "We have a buffer, pushing: %" GST_PTR_FORMAT,
+      buffer);
+
+  ret = gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+
+out:
+  GST_DEBUG_OBJECT (stream, "Returning %s", gst_flow_get_name (ret));
+  return ret;
+}
+
+static GstFlowReturn
+gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);   // FIXME: pass HlsStream into function
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  GST_DEBUG_OBJECT (stream, "Finishing fragment uri:%s",
+      hls_stream->current_segment->uri);
+
+  /* Drain all pending data */
+  if (hls_stream->current_key)
+    gst_hls_demux_stream_decrypt_end (hls_stream);
+
+  if (stream->last_ret == GST_FLOW_OK) {
+    if (hls_stream->pending_decrypted_buffer) {
+      if (hls_stream->current_key) {
+        GstMapInfo info;
+        gssize unpadded_size;
+
+        /* Handle pkcs7 unpadding here */
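+        /* The last byte of the decrypted data gives the number of PKCS#7
+         * padding bytes to strip */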
+        gst_buffer_map (hls_stream->pending_decrypted_buffer, &info,
+            GST_MAP_READ);
+        unpadded_size = info.size - info.data[info.size - 1];
+        gst_buffer_unmap (hls_stream->pending_decrypted_buffer, &info);
+
+        gst_buffer_resize (hls_stream->pending_decrypted_buffer, 0,
+            unpadded_size);
+      }
+
+      ret =
+          gst_hls_demux_handle_buffer (demux, stream,
+          hls_stream->pending_decrypted_buffer, TRUE);
+      hls_stream->pending_decrypted_buffer = NULL;
+    }
+
+    if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED) {
+      if (G_UNLIKELY (hls_stream->pending_typefind_buffer)) {
+        GstBuffer *buf = hls_stream->pending_typefind_buffer;
+        hls_stream->pending_typefind_buffer = NULL;
+
+        gst_hls_demux_handle_buffer (demux, stream, buf, TRUE);
+      }
+
+      if (hls_stream->pending_segment_data) {
+        GstBuffer *buf = hls_stream->pending_segment_data;
+        hls_stream->pending_segment_data = NULL;
+
+        ret = gst_hls_demux_handle_buffer (demux, stream, buf, TRUE);
+      }
+    }
+  }
+
+  gst_hls_demux_stream_clear_pending_data (hls_stream, FALSE);
+
+  if (G_UNLIKELY (stream->downloading_header || stream->downloading_index))
+    return GST_FLOW_OK;
+
+  if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED)
+    return gst_adaptive_demux2_stream_advance_fragment (demux, stream,
+        hls_stream->current_segment->duration);
+  return ret;
+}
+
+static GstFlowReturn
+gst_hls_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer)
+{
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+
+  if (hls_stream->current_offset == -1)
+    hls_stream->current_offset = 0;
+
+  /* Is it encrypted? */
+  if (hls_stream->current_key) {
+    GError *err = NULL;
+    gsize size;
+    GstBuffer *decrypted_buffer;
+    GstBuffer *tmp_buffer;
+
+    if (hls_stream->pending_encrypted_data == NULL)
+      hls_stream->pending_encrypted_data = gst_adapter_new ();
+
+    gst_adapter_push (hls_stream->pending_encrypted_data, buffer);
+    size = gst_adapter_available (hls_stream->pending_encrypted_data);
+
+    /* must be a multiple of 16 */
+    size &= (~0xF);
+
+    if (size == 0) {
+      return GST_FLOW_OK;
+    }
+
+    buffer = gst_adapter_take_buffer (hls_stream->pending_encrypted_data, size);
+    decrypted_buffer =
+        gst_hls_demux_decrypt_fragment (hlsdemux, hls_stream, buffer, &err);
+    if (err) {
+      GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Failed to decrypt buffer"),
+          ("decryption failed %s", err->message));
+      g_error_free (err);
+      return GST_FLOW_ERROR;
+    }
+
+    tmp_buffer = hls_stream->pending_decrypted_buffer;
+    hls_stream->pending_decrypted_buffer = decrypted_buffer;
+    buffer = tmp_buffer;
+    if (!buffer)
+      return GST_FLOW_OK;
+  }
+
+  return gst_hls_demux_handle_buffer (demux, stream, buffer, FALSE);
+}
+
+static void
+gst_hls_demux_stream_finalize (GObject * object)
+{
+  GstAdaptiveDemux2Stream *stream = (GstAdaptiveDemux2Stream *) object;
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (object);
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) stream->demux;
+
+  if (hls_stream == hlsdemux->main_stream)
+    hlsdemux->main_stream = NULL;
+
+  if (hls_stream->playlist) {
+    gst_hls_media_playlist_unref (hls_stream->playlist);
+    hls_stream->playlist = NULL;
+  }
+
+  if (hls_stream->pending_encrypted_data)
+    g_object_unref (hls_stream->pending_encrypted_data);
+
+  gst_buffer_replace (&hls_stream->pending_decrypted_buffer, NULL);
+  gst_buffer_replace (&hls_stream->pending_typefind_buffer, NULL);
+  gst_buffer_replace (&hls_stream->pending_segment_data, NULL);
+
+  if (hls_stream->current_key) {
+    g_free (hls_stream->current_key);
+    hls_stream->current_key = NULL;
+  }
+  if (hls_stream->current_iv) {
+    g_free (hls_stream->current_iv);
+    hls_stream->current_iv = NULL;
+  }
+  if (hls_stream->current_rendition) {
+    gst_hls_rendition_stream_unref (hls_stream->current_rendition);
+    hls_stream->current_rendition = NULL;
+  }
+  if (hls_stream->pending_rendition) {
+    gst_hls_rendition_stream_unref (hls_stream->pending_rendition);
+    hls_stream->pending_rendition = NULL;
+  }
+  gst_hls_demux_stream_decrypt_end (hls_stream);
+
+  G_OBJECT_CLASS (stream_parent_class)->finalize (object);
+}
+
+static gboolean
+gst_hls_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemuxStream *hls_stream = (GstHLSDemuxStream *) stream;
+
+  GST_DEBUG_OBJECT (stream, "has next ?");
+
+  return gst_hls_media_playlist_has_next_fragment (hls_stream->playlist,
+      hls_stream->current_segment, stream->demux->segment.rate > 0);
+}
+
+static GstFlowReturn
+gst_hls_demux_advance_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemuxStream *hlsdemux_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) stream->demux;
+  GstM3U8MediaSegment *new_segment = NULL;
+
+  GST_DEBUG_OBJECT (stream,
+      "Current segment sn:%" G_GINT64_FORMAT " stream_time:%" GST_STIME_FORMAT
+      " uri:%s", hlsdemux_stream->current_segment->sequence,
+      GST_STIME_ARGS (hlsdemux_stream->current_segment->stream_time),
+      hlsdemux_stream->current_segment->uri);
+
+  new_segment =
+      gst_hls_media_playlist_advance_fragment (hlsdemux_stream->playlist,
+      hlsdemux_stream->current_segment, stream->demux->segment.rate > 0);
+  if (new_segment) {
+    hlsdemux_stream->reset_pts = FALSE;
+    if (new_segment->discont_sequence !=
+        hlsdemux_stream->current_segment->discont_sequence)
+      gst_hls_demux_add_time_mapping (hlsdemux, new_segment->discont_sequence,
+          new_segment->stream_time, new_segment->datetime);
+    gst_m3u8_media_segment_unref (hlsdemux_stream->current_segment);
+    hlsdemux_stream->current_segment = new_segment;
+    GST_DEBUG_OBJECT (stream,
+        "Advanced to segment sn:%" G_GINT64_FORMAT " stream_time:%"
+        GST_STIME_FORMAT " uri:%s", hlsdemux_stream->current_segment->sequence,
+        GST_STIME_ARGS (hlsdemux_stream->current_segment->stream_time),
+        hlsdemux_stream->current_segment->uri);
+    hlsdemux_stream->pending_advance = FALSE;
+    return GST_FLOW_OK;
+  }
+
+  GST_LOG_OBJECT (stream, "Could not advance to next fragment");
+  if (GST_HLS_MEDIA_PLAYLIST_IS_LIVE (hlsdemux_stream->playlist)) {
+    hlsdemux_stream->pending_advance = TRUE;
+    return GST_FLOW_OK;
+  }
+
+  return GST_FLOW_EOS;
+}
+
+static GstHLSMediaPlaylist *
+download_media_playlist (GstHLSDemux * demux, gchar * uri, GError ** err,
+    GstHLSMediaPlaylist * current)
+{
+  GstAdaptiveDemux *adaptive_demux;
+  const gchar *main_uri;
+  DownloadRequest *download;
+  GstBuffer *buf;
+  gchar *playlist_data;
+  GstHLSMediaPlaylist *playlist = NULL;
+  gchar *base_uri;
+  gboolean playlist_uri_change = FALSE;
+
+  adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
+  main_uri = gst_adaptive_demux_get_manifest_ref_uri (adaptive_demux);
+
+  /* If there's no previous playlist, or the URI changed, this
+   * is not a refresh/update but a switch to a new playlist */
+  playlist_uri_change = (current == NULL || g_strcmp0 (uri, current->uri) != 0);
+
+  if (!playlist_uri_change) {
+    GST_LOG_OBJECT (demux, "Updating the playlist");
+  }
+
+  download =
+      downloadhelper_fetch_uri (adaptive_demux->download_helper,
+      uri, main_uri, DOWNLOAD_FLAG_COMPRESS | DOWNLOAD_FLAG_FORCE_REFRESH, err);
+
+  if (download == NULL)
+    return NULL;
+
+  /* Set the base URI of the playlist to the redirect target if any */
+  if (download->redirect_permanent && download->redirect_uri) {
+    uri = g_strdup (download->redirect_uri);
+    base_uri = NULL;
+  } else {
+    uri = g_strdup (download->uri);
+    base_uri = g_strdup (download->redirect_uri);
+  }
+
+  if (download->state == DOWNLOAD_REQUEST_STATE_ERROR) {
+    GST_WARNING_OBJECT (demux,
+        "Couldn't get the playlist, got HTTP status code %d",
+        download->status_code);
+    download_request_unref (download);
+    if (err)
+      g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
+          "Couldn't download the playlist");
+    goto out;
+  }
+  buf = download_request_take_buffer (download);
+  download_request_unref (download);
+
+  /* there should be a buf if there wasn't an error (handled above) */
+  g_assert (buf);
+
+  playlist_data = gst_hls_buf_to_utf8_text (buf);
+  gst_buffer_unref (buf);
+
+  if (playlist_data == NULL) {
+    GST_WARNING_OBJECT (demux, "Couldn't validate playlist encoding");
+    if (err)
+      g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE,
+          "Couldn't validate playlist encoding");
+    goto out;
+  }
+
+  if (!playlist_uri_change && current
+      && gst_hls_media_playlist_has_same_data (current, playlist_data)) {
+    GST_DEBUG_OBJECT (demux, "Same playlist data");
+    playlist = gst_hls_media_playlist_ref (current);
+    playlist->reloaded = TRUE;
+    g_free (playlist_data);
+  } else {
+    playlist = gst_hls_media_playlist_parse (playlist_data, uri, base_uri);
+    if (!playlist) {
+      GST_WARNING_OBJECT (demux, "Couldn't parse playlist");
+      if (err)
+        g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED,
+            "Couldn't parse playlist");
+    }
+  }
+
+out:
+  g_free (uri);
+  g_free (base_uri);
+
+  return playlist;
+}
+
+static GstHLSTimeMap *
+gst_hls_time_map_new (void)
+{
+  GstHLSTimeMap *map = g_new0 (GstHLSTimeMap, 1);
+
+  map->stream_time = GST_CLOCK_TIME_NONE;
+  map->internal_time = GST_CLOCK_TIME_NONE;
+
+  return map;
+}
+
+static GstHLSTimeMap *
+gst_hls_demux_add_time_mapping (GstHLSDemux * demux, gint64 dsn,
+    GstClockTimeDiff stream_time, GDateTime * pdt)
+{
+#ifndef GST_DISABLE_GST_DEBUG
+  gchar *datestring = NULL;
+#endif
+  GstHLSTimeMap *map;
+  GList *tmp;
+
+  g_assert (stream_time >= 0);
+
+  /* Check if we don't already have a mapping for the given dsn */
+  for (tmp = demux->mappings; tmp; tmp = tmp->next) {
+    GstHLSTimeMap *map = tmp->data;
+
+    if (map->dsn == dsn) {
+#ifndef GST_DISABLE_GST_DEBUG
+      if (map->pdt)
+        datestring = g_date_time_format_iso8601 (map->pdt);
+      GST_DEBUG_OBJECT (demux,
+          "Returning existing mapping, dsn:%" G_GINT64_FORMAT " stream_time:%"
+          GST_TIME_FORMAT " internal_time:%" GST_TIME_FORMAT " pdt:%s",
+          map->dsn, GST_TIME_ARGS (map->stream_time),
+          GST_TIME_ARGS (map->internal_time), datestring);
+      g_free (datestring);
+#endif
+      return map;
+    }
+  }
+
+#ifndef GST_DISABLE_GST_DEBUG
+  if (pdt)
+    datestring = g_date_time_format_iso8601 (pdt);
+  GST_DEBUG_OBJECT (demux,
+      "New mapping, dsn:%" G_GINT64_FORMAT " stream_time:%" GST_TIME_FORMAT
+      " pdt:%s", dsn, GST_TIME_ARGS (stream_time), datestring);
+  g_free (datestring);
+#endif
+
+  map = gst_hls_time_map_new ();
+  map->dsn = dsn;
+  map->stream_time = stream_time;
+  if (pdt)
+    map->pdt = g_date_time_ref (pdt);
+
+  demux->mappings = g_list_append (demux->mappings, map);
+
+  return map;
+}
+
+static void
+setup_initial_playlist_and_mapping (GstHLSDemux * demux,
+    GstHLSMediaPlaylist * playlist)
+{
+  guint idx, len = playlist->segments->len;
+  GstHLSTimeMap *map = NULL;
+  GstM3U8MediaSegment *segment;
+  GstClockTimeDiff pos = 0;
+
+  GST_DEBUG_OBJECT (demux,
+      "Setting up initial variant segment and time mapping");
+
+  /* This is the initial variant playlist. We will use it as the basis for all
+   * our timing. */
+
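+  /* Accumulate segment durations to assign each segment a stream time, and add
+   * a time mapping at every discontinuity */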
+  for (idx = 0; idx < len; idx++) {
+    segment = g_ptr_array_index (playlist->segments, idx);
+
+    if (!map || segment->discont)
+      map =
+          gst_hls_demux_add_time_mapping (demux, segment->discont_sequence, pos,
+          segment->datetime);
+    segment->stream_time = pos;
+    pos += segment->duration;
+  }
+}
+
+static gboolean
+gst_hls_demux_stream_update_media_playlist (GstHLSDemux * demux,
+    GstHLSDemuxStream * stream, gchar ** uri, GError ** err)
+{
+  GstHLSMediaPlaylist *new_playlist;
+
+  GST_DEBUG_OBJECT (stream, "Updating %s", *uri);
+
+  new_playlist = download_media_playlist (demux, *uri, err, stream->playlist);
+  if (new_playlist == NULL) {
+    GST_WARNING_OBJECT (stream, "Could not get playlist '%s'", *uri);
+    return FALSE;
+  }
+
+  /* Check if a redirect happened */
+  if (g_strcmp0 (*uri, new_playlist->uri)) {
+    GST_DEBUG_OBJECT (stream, "Playlist URI update : '%s'  =>  '%s'", *uri,
+        new_playlist->uri);
+    g_free (*uri);
+    *uri = g_strdup (new_playlist->uri);
+  }
+
+  if (stream->current_segment) {
+    GstM3U8MediaSegment *new_segment;
+    GST_DEBUG_OBJECT (stream,
+        "Current segment sn:%" G_GINT64_FORMAT " stream_time:%" GST_STIME_FORMAT
+        " uri:%s", stream->current_segment->sequence,
+        GST_STIME_ARGS (stream->current_segment->stream_time),
+        stream->current_segment->uri);
+
+    /* Use best-effort techniques to find the corresponding current media segment
+     * in the new playlist. This might be off in some cases, but it doesn't matter
+     * since we will be checking the embedded timestamp later */
+    new_segment =
+        gst_hls_media_playlist_sync_to_segment (new_playlist,
+        stream->current_segment);
+    if (new_segment) {
+      if (new_segment->discont_sequence !=
+          stream->current_segment->discont_sequence)
+        gst_hls_demux_add_time_mapping (demux, new_segment->discont_sequence,
+            new_segment->stream_time, new_segment->datetime);
+      /* This can happen in case of misaligned variants/renditions. Only warn about it */
+      if (new_segment->stream_time != stream->current_segment->stream_time)
+        GST_WARNING_OBJECT (stream,
+            "Returned segment stream time %" GST_STIME_FORMAT
+            " differs from current stream time %" GST_STIME_FORMAT,
+            GST_STIME_ARGS (new_segment->stream_time),
+            GST_STIME_ARGS (stream->current_segment->stream_time));
+    } else {
+      /* Not finding a matching segment only happens in live (otherwise we would
+       * have found a match by stream time). In order to fix this, we pick a new
+       * starting segment, give it the current segment stream time and hope for
+       * the best */
+      GST_WARNING_OBJECT (stream,
+          "Could not find a matching segment, picking a new initial one");
+      new_segment = gst_hls_media_playlist_get_starting_segment (new_playlist);
+      new_segment->stream_time = stream->current_segment->stream_time;
+      gst_hls_media_playlist_recalculate_stream_time (new_playlist,
+          new_segment);
+    }
+    gst_m3u8_media_segment_unref (stream->current_segment);
+    stream->current_segment = new_segment;
+  } else {
+    GST_DEBUG_OBJECT (stream, "No current segment, doing initial selection");
+    /* Initial choice */
+    stream->current_segment =
+        gst_hls_media_playlist_get_starting_segment (new_playlist);
+    setup_initial_playlist_and_mapping (demux, new_playlist);
+  }
+
+  if (stream->playlist)
+    gst_hls_media_playlist_unref (stream->playlist);
+  stream->playlist = new_playlist;
+  gst_hls_media_playlist_dump (new_playlist);
+
+  if (stream->current_segment) {
+    GST_DEBUG_OBJECT (stream,
+        "After update, current segment now sn:%" G_GINT64_FORMAT
+        " stream_time:%" GST_STIME_FORMAT " uri:%s",
+        stream->current_segment->sequence,
+        GST_STIME_ARGS (stream->current_segment->stream_time),
+        stream->current_segment->uri);
+  } else {
+    GST_DEBUG_OBJECT (stream, "No current segment selected");
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_hls_demux_stream_update_rendition_playlist (GstHLSDemux * demux,
+    GstHLSDemuxStream * stream)
+{
+  GstHLSRenditionStream *target_rendition =
+      stream->pending_rendition ? stream->
+      pending_rendition : stream->current_rendition;
+  if (!gst_hls_demux_stream_update_media_playlist (demux, stream,
+          &target_rendition->uri, NULL))
+    return FALSE;
+
+  if (stream->pending_rendition) {
+    gst_hls_rendition_stream_unref (stream->current_rendition);
+    /* Stealing ref */
+    stream->current_rendition = stream->pending_rendition;
+    stream->pending_rendition = NULL;
+  }
+
+  stream->playlist_fetched = TRUE;
+
+  return TRUE;
+}
+
+static gboolean
+gst_hls_demux_stream_update_variant_playlist (GstHLSDemux * demux,
+    GstHLSDemuxStream * stream, GError ** err)
+{
+  GstHLSVariantStream *target_variant =
+      demux->pending_variant ? demux->pending_variant : demux->current_variant;
+
+  if (!gst_hls_demux_stream_update_media_playlist (demux, stream,
+          &target_variant->uri, err))
+    return FALSE;
+
+  if (demux->pending_variant) {
+    gst_hls_variant_stream_unref (demux->current_variant);
+    /* Stealing ref */
+    demux->current_variant = demux->pending_variant;
+    demux->pending_variant = NULL;
+  }
+
+  stream->playlist_fetched = TRUE;
+
+  return TRUE;
+}
+
+static GstFlowReturn
+gst_hls_demux_update_fragment_info (GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemuxStream *hlsdemux_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+  GstAdaptiveDemux *demux = stream->demux;
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstM3U8MediaSegment *file;
+  gboolean discont;
+
+  /* If the rendition playlist needs to be updated, do it now */
+  if (!hlsdemux_stream->is_variant && !hlsdemux_stream->playlist_fetched) {
+    if (!gst_hls_demux_stream_update_rendition_playlist (hlsdemux,
+            hlsdemux_stream))
+      return GST_FLOW_ERROR;
+  }
+
+  if (hlsdemux_stream->pending_advance) {
+    GST_DEBUG_OBJECT (stream, "Applying pending advance_fragment");
+    gst_hls_demux_advance_fragment (stream);
+    /* If we are still waiting to advance, return EOS to trigger the appropriate
+     * playlist update (if live) or real EOS */
+    if (hlsdemux_stream->pending_advance)
+      return GST_FLOW_EOS;
+  }
+
+  GST_DEBUG_OBJECT (hlsdemux, "Updating for %s stream '%s'",
+      hlsdemux_stream->is_variant ? "MAIN" : "MEDIA",
+      hlsdemux_stream->is_variant ?
+      hlsdemux->current_variant->name : hlsdemux_stream->
+      current_rendition->name);
+
+  file = hlsdemux_stream->current_segment;
+
+  if (file == NULL) {
+    GST_INFO_OBJECT (hlsdemux, "This playlist doesn't contain more fragments");
+    return GST_FLOW_EOS;
+  }
+
+  discont = file->discont || stream->discont;
+
+  if (GST_ADAPTIVE_DEMUX2_STREAM_NEED_HEADER (stream) && file->init_file) {
+    GstM3U8InitFile *header_file = file->init_file;
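+    /* Byte ranges are inclusive, hence the end offset is offset + size - 1 */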
+    stream->fragment.header_uri = g_strdup (header_file->uri);
+    stream->fragment.header_range_start = header_file->offset;
+    if (header_file->size != -1) {
+      stream->fragment.header_range_end =
+          header_file->offset + header_file->size - 1;
+    } else {
+      stream->fragment.header_range_end = -1;
+    }
+  }
+
+  /* set up our source for download */
+  if (hlsdemux_stream->reset_pts || discont || demux->segment.rate < 0.0) {
+    stream->fragment.stream_time = file->stream_time;
+  } else {
+    stream->fragment.stream_time = GST_CLOCK_STIME_NONE;
+  }
+
+  g_free (hlsdemux_stream->current_key);
+  hlsdemux_stream->current_key = g_strdup (file->key);
+  g_free (hlsdemux_stream->current_iv);
+  hlsdemux_stream->current_iv = g_memdup2 (file->iv, sizeof (file->iv));
+
+  g_free (stream->fragment.uri);
+  stream->fragment.uri = g_strdup (file->uri);
+
+  GST_DEBUG_OBJECT (stream, "Stream URI now %s", file->uri);
+
+  stream->fragment.range_start = file->offset;
+  if (file->size != -1)
+    stream->fragment.range_end = file->offset + file->size - 1;
+  else
+    stream->fragment.range_end = -1;
+
+  stream->fragment.duration = file->duration;
+
+  if (discont)
+    stream->discont = TRUE;
+
+  return GST_FLOW_OK;
+}
+
+static gboolean
+gst_hls_demux_stream_can_start (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) demux;
+  GstHLSDemuxStream *hls_stream = (GstHLSDemuxStream *) stream;
+  GList *tmp;
+
+  GST_DEBUG_OBJECT (demux, "is_variant:%d mappings:%p", hls_stream->is_variant,
+      hlsdemux->mappings);
+
+  /* Variant streams can always start straight away */
+  if (hls_stream->is_variant)
+    return TRUE;
+
+  /* Renditions of the exact same type as the variant are pure alternatives,
+   * they must be started. This can happen for example with audio-only manifests
+   * where the initial stream selected is a rendition and not a variant */
+  if (hls_stream->rendition_type == hlsdemux->main_stream->rendition_type)
+    return TRUE;
+
+  /* Rendition streams only require delaying if we don't have time mappings yet */
+  if (!hlsdemux->mappings)
+    return FALSE;
+
+  /* We can start if we have at least one internal time observation */
+  for (tmp = hlsdemux->mappings; tmp; tmp = tmp->next) {
+    GstHLSTimeMap *map = tmp->data;
+    if (map->internal_time != GST_CLOCK_TIME_NONE)
+      return TRUE;
+  }
+
+  /* Otherwise we have to wait */
+  return FALSE;
+}
+
+/* Returns TRUE if the rendition stream switched group-id */
+static gboolean
+gst_hls_demux_update_rendition_stream (GstHLSDemux * hlsdemux,
+    GstHLSDemuxStream * hls_stream, GError ** err)
+{
+  gchar *current_group_id, *requested_group_id;
+  GstHLSRenditionStream *replacement_media = NULL;
+  GList *tmp;
+
+  /* There always should be a current variant set */
+  g_assert (hlsdemux->current_variant);
+  /* There always is a GstHLSRenditionStream set for rendition streams */
+  g_assert (hls_stream->current_rendition);
+
+  requested_group_id =
+      hlsdemux->current_variant->media_groups[hls_stream->
+      current_rendition->mtype];
+  current_group_id = hls_stream->current_rendition->group_id;
+
+  GST_DEBUG_OBJECT (hlsdemux,
+      "Checking playlist change for variant stream %s lang: %s current group-id: %s / requested group-id: %s",
+      gst_stream_type_get_name (hls_stream->rendition_type), hls_stream->lang,
+      current_group_id, requested_group_id);
+
+
+  if (!g_strcmp0 (requested_group_id, current_group_id)) {
+    GST_DEBUG_OBJECT (hlsdemux, "No change needed");
+    return FALSE;
+  }
+
+  GST_DEBUG_OBJECT (hlsdemux,
+      "group-id changed, looking for replacement playlist");
+
+  /* Need to switch/update */
+  for (tmp = hlsdemux->master->renditions; tmp; tmp = tmp->next) {
+    GstHLSRenditionStream *cand = tmp->data;
+
+    if (cand->mtype == hls_stream->current_rendition->mtype
+        && !g_strcmp0 (cand->lang, hls_stream->lang)
+        && !g_strcmp0 (cand->group_id, requested_group_id)) {
+      replacement_media = cand;
+      break;
+    }
+  }
+  if (!replacement_media) {
+    GST_ERROR_OBJECT (hlsdemux,
+        "Could not find a replacement playlist. Staying with previous one");
+    return FALSE;
+  }
+
+  GST_DEBUG_OBJECT (hlsdemux, "Use replacement playlist %s",
+      replacement_media->name);
+  hls_stream->playlist_fetched = FALSE;
+  if (hls_stream->pending_rendition) {
+    GST_ERROR_OBJECT (hlsdemux,
+        "Already had a pending rendition switch to '%s'",
+        hls_stream->pending_rendition->name);
+    gst_hls_rendition_stream_unref (hls_stream->pending_rendition);
+  }
+  hls_stream->pending_rendition =
+      gst_hls_rendition_stream_ref (replacement_media);
+  return TRUE;
+}
+
+static gboolean
+gst_hls_demux_select_bitrate (GstAdaptiveDemux2Stream * stream, guint64 bitrate)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (stream->demux);
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (stream->demux);
+  GstHLSDemuxStream *hls_stream = GST_HLS_DEMUX_STREAM_CAST (stream);
+
+  /* Fast-Path, no changes possible */
+  if (hlsdemux->master == NULL || hlsdemux->master->is_simple)
+    return FALSE;
+
+  if (hls_stream->is_variant) {
+    gdouble play_rate = gst_adaptive_demux_play_rate (demux);
+    gboolean changed = FALSE;
+
+    /* Handle variant streams */
+    GST_DEBUG_OBJECT (hlsdemux,
+        "Checking playlist change for main variant stream");
+    gst_hls_demux_change_playlist (hlsdemux, bitrate / MAX (1.0,
+            ABS (play_rate)), &changed);
+
+    GST_DEBUG_OBJECT (hlsdemux, "Returning changed: %d", changed);
+    return changed;
+  }
+
+  /* Handle rendition streams */
+  return gst_hls_demux_update_rendition_stream (hlsdemux, hls_stream, NULL);
+}
+
+static void
+gst_hls_demux_reset (GstAdaptiveDemux * ademux)
+{
+  GstHLSDemux *demux = GST_HLS_DEMUX_CAST (ademux);
+
+  GST_DEBUG_OBJECT (demux, "resetting");
+
+  if (demux->master) {
+    gst_hls_master_playlist_unref (demux->master);
+    demux->master = NULL;
+  }
+  if (demux->current_variant != NULL) {
+    gst_hls_variant_stream_unref (demux->current_variant);
+    demux->current_variant = NULL;
+  }
+  if (demux->pending_variant != NULL) {
+    gst_hls_variant_stream_unref (demux->pending_variant);
+    demux->pending_variant = NULL;
+  }
+
+  g_list_free_full (demux->mappings, g_free);
+  demux->mappings = NULL;
+
+  gst_hls_demux_clear_all_pending_data (demux);
+}
+
+/*
+ * update: TRUE only when requested from parent class (via
+ * ::demux_update_manifest() or ::change_playlist() ).
+ */
+static gboolean
+gst_hls_demux_update_playlist (GstHLSDemux * demux, gboolean update,
+    GError ** err)
+{
+  GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX (demux);
+
+  GST_DEBUG_OBJECT (demux, "update:%d", update);
+
+  /* Download and update the appropriate variant playlist (pending if any, else
+   * current) */
+  if (!gst_hls_demux_stream_update_variant_playlist (demux, demux->main_stream,
+          err))
+    return FALSE;
+
+  if (demux->main_stream->pending_advance) {
+    GST_DEBUG_OBJECT (demux, "Applying variant pending advance_fragment");
+    if (gst_hls_demux_advance_fragment ((GstAdaptiveDemux2Stream *)
+            demux->main_stream) != GST_FLOW_OK) {
+      /* We return now without marking the rendition streams for update since
+       * we couldn't advance. But we return TRUE since we *did* update the
+       * playlist */
+      GST_DEBUG_OBJECT (demux, "Couldn't apply pending advance yet");
+      return TRUE;
+    }
+  }
+
+  if (update && gst_hls_demux_is_live (adaptive_demux)) {
+    GList *tmp;
+    GST_DEBUG_OBJECT (demux,
+        "LIVE, Marking rendition streams to be updated next");
+    /* We're live, instruct all rendition media playlists to be updated next */
+    for (tmp = adaptive_demux->input_period->streams; tmp; tmp = tmp->next) {
+      GstHLSDemuxStream *hls_stream = tmp->data;
+      if (!hls_stream->is_variant)
+        hls_stream->playlist_fetched = FALSE;
+    }
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_hls_demux_change_playlist (GstHLSDemux * demux, guint max_bitrate,
+    gboolean * changed)
+{
+  GstHLSVariantStream *lowest_variant, *lowest_ivariant;
+  GstHLSVariantStream *previous_variant, *new_variant;
+  gint old_bandwidth, new_bandwidth;
+  GstAdaptiveDemux *adaptive_demux = GST_ADAPTIVE_DEMUX_CAST (demux);
+  GstAdaptiveDemux2Stream *stream;
+
+  g_return_val_if_fail (demux->main_stream != NULL, FALSE);
+  stream = (GstAdaptiveDemux2Stream *) demux->main_stream;
+
+  /* Make sure we keep a reference in case we need to switch back */
+  previous_variant = gst_hls_variant_stream_ref (demux->current_variant);
+  new_variant =
+      gst_hls_master_playlist_get_variant_for_bitrate (demux->master,
+      demux->current_variant, max_bitrate, adaptive_demux->min_bitrate);
+
+retry_failover_protection:
+  old_bandwidth = previous_variant->bandwidth;
+  new_bandwidth = new_variant->bandwidth;
+
+  /* Don't do anything else if the playlist is the same */
+  if (new_bandwidth == old_bandwidth) {
+    gst_hls_variant_stream_unref (previous_variant);
+    return TRUE;
+  }
+
+  gst_hls_demux_set_current_variant (demux, new_variant);
+
+  GST_INFO_OBJECT (demux, "Client was on %dbps, max allowed is %dbps, switching"
+      " to bitrate %dbps", old_bandwidth, max_bitrate, new_bandwidth);
+
+  if (gst_hls_demux_update_playlist (demux, TRUE, NULL)) {
+    const gchar *main_uri;
+    gchar *uri = new_variant->uri;
+
+    main_uri = gst_adaptive_demux_get_manifest_ref_uri (adaptive_demux);
+    gst_element_post_message (GST_ELEMENT_CAST (demux),
+        gst_message_new_element (GST_OBJECT_CAST (demux),
+            gst_structure_new (GST_ADAPTIVE_DEMUX_STATISTICS_MESSAGE_NAME,
+                "manifest-uri", G_TYPE_STRING,
+                main_uri, "uri", G_TYPE_STRING,
+                uri, "bitrate", G_TYPE_INT, new_bandwidth, NULL)));
+    if (changed)
+      *changed = TRUE;
+    stream->discont = TRUE;
+  } else if (gst_adaptive_demux2_is_running (GST_ADAPTIVE_DEMUX_CAST (demux))) {
+    GstHLSVariantStream *failover_variant = NULL;
+    GList *failover;
+
+    GST_INFO_OBJECT (demux, "Unable to update playlist. Switching back");
+
+    /* we find variants by bitrate by going from highest to lowest, so it's
+     * possible that there's another variant with the same bitrate before the
+     * one selected which we can use as failover */
+    failover = g_list_find (demux->master->variants, new_variant);
+    if (failover != NULL)
+      failover = failover->prev;
+    if (failover != NULL)
+      failover_variant = failover->data;
+    if (failover_variant && new_bandwidth == failover_variant->bandwidth) {
+      new_variant = failover_variant;
+      goto retry_failover_protection;
+    }
+
+    gst_hls_demux_set_current_variant (demux, previous_variant);
+    /*  Try a lower bitrate (or stop if we just tried the lowest) */
+    if (previous_variant->iframe) {
+      lowest_ivariant = demux->master->iframe_variants->data;
+      if (new_bandwidth == lowest_ivariant->bandwidth)
+        return FALSE;
+    } else {
+      lowest_variant = demux->master->variants->data;
+      if (new_bandwidth == lowest_variant->bandwidth)
+        return FALSE;
+    }
+    return gst_hls_demux_change_playlist (demux, new_bandwidth - 1, changed);
+  }
+
+  gst_hls_variant_stream_unref (previous_variant);
+  return TRUE;
+}
+
+#if defined(HAVE_OPENSSL)
+static gboolean
+gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+    const guint8 * key_data, const guint8 * iv_data)
+{
+  EVP_CIPHER_CTX *ctx;
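+  /* OpenSSL 1.1.0 made EVP_CIPHER_CTX opaque, so newer versions must allocate
+   * it with EVP_CIPHER_CTX_new() */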
+#if OPENSSL_VERSION_NUMBER < 0x10100000L
+  EVP_CIPHER_CTX_init (&stream->aes_ctx);
+  ctx = &stream->aes_ctx;
+#else
+  stream->aes_ctx = EVP_CIPHER_CTX_new ();
+  ctx = stream->aes_ctx;
+#endif
+  if (!EVP_DecryptInit_ex (ctx, EVP_aes_128_cbc (), NULL, key_data, iv_data))
+    return FALSE;
+  EVP_CIPHER_CTX_set_padding (ctx, 0);
+  return TRUE;
+}
+
+static gboolean
+decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+    const guint8 * encrypted_data, guint8 * decrypted_data)
+{
+  int len, flen = 0;
+  EVP_CIPHER_CTX *ctx;
+
+#if OPENSSL_VERSION_NUMBER < 0x10100000L
+  ctx = &stream->aes_ctx;
+#else
+  ctx = stream->aes_ctx;
+#endif
+
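+  /* AES-128-CBC operates on 16-byte blocks, and EVP_DecryptUpdate() takes an
+   * int length */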
+  if (G_UNLIKELY (length > G_MAXINT || length % 16 != 0))
+    return FALSE;
+
+  len = (int) length;
+  if (!EVP_DecryptUpdate (ctx, decrypted_data, &len, encrypted_data, len))
+    return FALSE;
+  EVP_DecryptFinal_ex (ctx, decrypted_data + len, &flen);
+  g_return_val_if_fail (len + flen == length, FALSE);
+  return TRUE;
+}
+
+static void
+gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+{
+#if OPENSSL_VERSION_NUMBER < 0x10100000L
+  EVP_CIPHER_CTX_cleanup (&stream->aes_ctx);
+#else
+  EVP_CIPHER_CTX_free (stream->aes_ctx);
+  stream->aes_ctx = NULL;
+#endif
+}
+
+#elif defined(HAVE_NETTLE)
+static gboolean
+gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+    const guint8 * key_data, const guint8 * iv_data)
+{
+  aes128_set_decrypt_key (&stream->aes_ctx.ctx, key_data);
+  CBC_SET_IV (&stream->aes_ctx, iv_data);
+
+  return TRUE;
+}
+
+static gboolean
+decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+    const guint8 * encrypted_data, guint8 * decrypted_data)
+{
+  if (length % 16 != 0)
+    return FALSE;
+
+  CBC_DECRYPT (&stream->aes_ctx, aes128_decrypt, length, decrypted_data,
+      encrypted_data);
+
+  return TRUE;
+}
+
+static void
+gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+{
+  /* NOP */
+}
+
+#elif defined(HAVE_LIBGCRYPT)
+static gboolean
+gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+    const guint8 * key_data, const guint8 * iv_data)
+{
+  gcry_error_t err = 0;
+  gboolean ret = FALSE;
+
+  err =
+      gcry_cipher_open (&stream->aes_ctx, GCRY_CIPHER_AES128,
+      GCRY_CIPHER_MODE_CBC, 0);
+  if (err)
+    goto out;
+  err = gcry_cipher_setkey (stream->aes_ctx, key_data, 16);
+  if (err)
+    goto out;
+  err = gcry_cipher_setiv (stream->aes_ctx, iv_data, 16);
+  if (!err)
+    ret = TRUE;
+
+out:
+  if (!ret)
+    if (stream->aes_ctx)
+      gcry_cipher_close (stream->aes_ctx);
+
+  return ret;
+}
+
+static gboolean
+decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+    const guint8 * encrypted_data, guint8 * decrypted_data)
+{
+  gcry_error_t err = 0;
+
+  err = gcry_cipher_decrypt (stream->aes_ctx, decrypted_data, length,
+      encrypted_data, length);
+
+  return err == 0;
+}
+
+static void
+gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+{
+  if (stream->aes_ctx) {
+    gcry_cipher_close (stream->aes_ctx);
+    stream->aes_ctx = NULL;
+  }
+}
+
+#else
+/* NO crypto available */
+static gboolean
+gst_hls_demux_stream_decrypt_start (GstHLSDemuxStream * stream,
+    const guint8 * key_data, const guint8 * iv_data)
+{
+  GST_ERROR ("No crypto available");
+  return FALSE;
+}
+
+static gboolean
+decrypt_fragment (GstHLSDemuxStream * stream, gsize length,
+    const guint8 * encrypted_data, guint8 * decrypted_data)
+{
+  GST_ERROR ("Cannot decrypt fragment, no crypto available");
+  return FALSE;
+}
+
+static void
+gst_hls_demux_stream_decrypt_end (GstHLSDemuxStream * stream)
+{
+  return;
+}
+#endif
+
+static GstBuffer *
+gst_hls_demux_decrypt_fragment (GstHLSDemux * demux, GstHLSDemuxStream * stream,
+    GstBuffer * encrypted_buffer, GError ** err)
+{
+  GstBuffer *decrypted_buffer = NULL;
+  GstMapInfo encrypted_info, decrypted_info;
+
+  decrypted_buffer =
+      gst_buffer_new_allocate (NULL, gst_buffer_get_size (encrypted_buffer),
+      NULL);
+
+  gst_buffer_map (encrypted_buffer, &encrypted_info, GST_MAP_READ);
+  gst_buffer_map (decrypted_buffer, &decrypted_info, GST_MAP_WRITE);
+
+  if (!decrypt_fragment (stream, encrypted_info.size,
+          encrypted_info.data, decrypted_info.data))
+    goto decrypt_error;
+
+
+  gst_buffer_unmap (decrypted_buffer, &decrypted_info);
+  gst_buffer_unmap (encrypted_buffer, &encrypted_info);
+
+  gst_buffer_unref (encrypted_buffer);
+
+  return decrypted_buffer;
+
+decrypt_error:
+  GST_ERROR_OBJECT (demux, "Failed to decrypt fragment");
+  g_set_error (err, GST_STREAM_ERROR, GST_STREAM_ERROR_DECRYPT,
+      "Failed to decrypt fragment");
+
+  gst_buffer_unmap (decrypted_buffer, &decrypted_info);
+  gst_buffer_unmap (encrypted_buffer, &encrypted_info);
+
+  gst_buffer_unref (encrypted_buffer);
+  gst_buffer_unref (decrypted_buffer);
+
+  return NULL;
+}
+
+static gint64
+gst_hls_demux_get_manifest_update_interval (GstAdaptiveDemux * demux)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  GstClockTime target_duration = 5 * GST_SECOND;
+
+  if (hlsdemux->main_stream && hlsdemux->main_stream->playlist) {
+    GstHLSMediaPlaylist *playlist = hlsdemux->main_stream->playlist;
+
+    if (playlist->version > 5) {
+      target_duration = hlsdemux->main_stream->playlist->targetduration;
+    } else if (playlist->segments->len) {
+      GstM3U8MediaSegment *last_seg =
+          g_ptr_array_index (playlist->segments, playlist->segments->len - 1);
+      target_duration = last_seg->duration;
+    }
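+    /* RFC 8216 suggests retrying after half the target duration when the
+     * playlist did not change on reload */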
+    if (playlist->reloaded && target_duration > (playlist->targetduration / 2)) {
+      GST_DEBUG_OBJECT (demux,
+          "Playlist didn't change previously, returning lower update interval");
+      target_duration /= 2;
+    }
+  }
+
+  GST_DEBUG_OBJECT (demux, "Returning update interval of %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (target_duration));
+
+  return gst_util_uint64_scale (target_duration, G_USEC_PER_SEC, GST_SECOND);
+}
+
+static GstClockTime
+gst_hls_demux_get_presentation_offset (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream)
+{
+  GstHLSDemux *hlsdemux = (GstHLSDemux *) demux;
+  GstHLSDemuxStream *hls_stream = (GstHLSDemuxStream *) stream;
+
+  GST_DEBUG_OBJECT (stream, "presentation_offset %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (hls_stream->presentation_offset));
+
+  /* If this stream and the variant stream are ISOBMFF, returns the presentation
+   * offset of the variant stream */
+  if (hls_stream->parser_type == GST_HLS_PARSER_ISOBMFF
+      && hlsdemux->main_stream->parser_type == GST_HLS_PARSER_ISOBMFF)
+    return hlsdemux->main_stream->presentation_offset;
+  return hls_stream->presentation_offset;
+}
+
+static gboolean
+gst_hls_demux_get_live_seek_range (GstAdaptiveDemux * demux, gint64 * start,
+    gint64 * stop)
+{
+  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
+  gboolean ret = FALSE;
+
+  if (hlsdemux->main_stream && hlsdemux->main_stream->playlist)
+    ret =
+        gst_hls_media_playlist_get_seek_range (hlsdemux->main_stream->playlist,
+        start, stop);
+
+  return ret;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlsdemux.h
new file mode 100644 (file)
index 0000000..3a0109d
--- /dev/null
@@ -0,0 +1,252 @@
+/* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * gsthlsdemux.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_HLS_DEMUX_H__
+#define __GST_HLS_DEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include "m3u8.h"
+#include "gstisoff.h"
+#include "gstadaptivedemux.h"
+#if defined(HAVE_OPENSSL)
+#include <openssl/evp.h>
+#elif defined(HAVE_NETTLE)
+#include <nettle/aes.h>
+#include <nettle/cbc.h>
+#elif defined(HAVE_LIBGCRYPT)
+#include <gcrypt.h>
+#endif
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_HLS_DEMUX2 \
+  (gst_hls_demux2_get_type())
+#define GST_HLS_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_HLS_DEMUX2,GstHLSDemux))
+#define GST_HLS_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_HLS_DEMUX2,GstHLSDemuxClass))
+#define GST_IS_HLS_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_HLS_DEMUX2))
+#define GST_IS_HLS_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_HLS_DEMUX2))
+#define GST_HLS_DEMUX_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_HLS_DEMUX2,GstHLSDemuxClass))
+#define GST_HLS_DEMUX_CAST(obj) \
+  ((GstHLSDemux *)obj)
+
+typedef struct _GstHLSDemux2 GstHLSDemux;
+typedef struct _GstHLSDemux2Class GstHLSDemuxClass;
+
+#define GST_TYPE_HLS_DEMUX_STREAM \
+  (gst_hls_demux_stream_get_type())
+#define GST_HLS_DEMUX_STREAM(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_HLS_DEMUX_STREAM,GstHLSDemuxStream))
+#define GST_HLS_DEMUX_STREAM_CAST(obj) ((GstHLSDemuxStream *)obj)
+
+typedef struct _GstHLSDemuxStream GstHLSDemuxStream;
+typedef GstAdaptiveDemux2StreamClass GstHLSDemuxStreamClass;
+
+typedef enum {
+  GST_HLS_PARSER_NONE,
+  GST_HLS_PARSER_MPEGTS,
+  GST_HLS_PARSER_ID3,
+  GST_HLS_PARSER_WEBVTT,
+  GST_HLS_PARSER_ISOBMFF
+} GstHLSParserType;
+
+typedef enum {
+  /* More data is needed to parse the fragment */
+  GST_HLS_PARSER_RESULT_NEED_MORE_DATA,
+  /* An error happened, discard the fragment */
+  GST_HLS_PARSER_RESULT_ERROR,
+  /* Parsing succeeded, it no longer needs to be called for this fragment */
+  GST_HLS_PARSER_RESULT_DONE,
+  /* The fragment wasn't the expected one. Current data must be dropped and
+   * GST_ADAPTIVE_DEMUX_FLOW_RESTART_FRAGMENT returned to the parent class */
+  GST_HLS_PARSER_RESULT_RESYNC
+} GstHLSParserResult;
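+
+/* Illustrative sketch only (variable names are hypothetical, the real call
+ * sites live elsewhere in the element code): a caller of the
+ * gst_hlsdemux_handle_content_*() functions declared below would typically
+ * react to these results along these lines:
+ *
+ *   switch (gst_hlsdemux_handle_content_mpegts (demux, stream, draining, &buf)) {
+ *     case GST_HLS_PARSER_RESULT_NEED_MORE_DATA:
+ *       break;                      // keep accumulating segment data
+ *     case GST_HLS_PARSER_RESULT_DONE:
+ *       break;                      // nothing more to parse for this fragment
+ *     case GST_HLS_PARSER_RESULT_RESYNC:
+ *       return GST_ADAPTIVE_DEMUX_FLOW_RESTART_FRAGMENT;
+ *     case GST_HLS_PARSER_RESULT_ERROR:
+ *       break;                      // discard the fragment
+ *   }
+ */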
+
+struct _GstHLSDemuxStream
+{
+  GstAdaptiveDemux2Stream adaptive_demux_stream;
+
+  /* A stream is either a variant or a rendition */
+  gboolean is_variant;
+
+  /* Rendition-specific fields */
+  GstStreamType rendition_type;        /* FIXME: Also used by variant streams */
+  gchar *lang;
+  gchar *name;
+  GstHLSRenditionStream *current_rendition;
+  /* rendition to switch to */
+  GstHLSRenditionStream *pending_rendition;
+  /* End of Rendition-specific fields */
+
+  /* Whether the underlying playlist was fetched on creation */
+  gboolean playlist_fetched;
+
+  /* The media playlist currently used */
+  GstHLSMediaPlaylist *playlist;
+
+  /* The segment (from the above playlist) currently being used */
+  GstM3U8MediaSegment *current_segment;
+
+  /* The previous ::advance_fragment() failed for a live stream */
+  gboolean pending_advance;
+
+  /* Whether we need to typefind the next buffer */
+  gboolean do_typefind;
+
+  /* for collecting data until typefind succeeds */
+  GstBuffer *pending_typefind_buffer;
+
+  /* for chunking data into 16 byte multiples for decryption */
+  GstAdapter *pending_encrypted_data;
+
+  /* Last decrypted buffer, kept for PKCS#7 unpadding. We only know that it is
+   * the last one when ::finish_fragment() is called */
+  GstBuffer *pending_decrypted_buffer;
+
+  /* Current offset (in bytes) in fragment data we pushed downstream. Resets to
+   * -1 at every fragment start */
+  guint64 current_offset;
+
+  gboolean reset_pts;
+
+  /* decryption tooling */
+#if defined(HAVE_OPENSSL)
+# if OPENSSL_VERSION_NUMBER < 0x10100000L
+  EVP_CIPHER_CTX aes_ctx;
+# else
+  EVP_CIPHER_CTX *aes_ctx;
+# endif
+#elif defined(HAVE_NETTLE)
+  struct CBC_CTX (struct aes128_ctx, AES_BLOCK_SIZE) aes_ctx;
+#elif defined(HAVE_LIBGCRYPT)
+  gcry_cipher_hd_t aes_ctx;
+#endif
+
+  gchar     *current_key;
+  guint8    *current_iv;
+
+  /* The type of parser used for data handling */
+  GstHLSParserType parser_type;
+
+  /* Is content processing required? */
+  gboolean process_buffer_content;
+  /* Data to be analyzed by the content parser (see parser_type) */
+  GstBuffer *pending_segment_data;
+  /* TRUE if pending_segment_data contains data from a header/index */
+  gboolean pending_data_is_header;
+
+  /* ISOBMFF */
+  GstMoovBox *moov;
+
+  /* Presentation offset to use and report. This value will be added to all
+   * "output" stream times. Not enabled (i.e. 0) if the variant is ISOBMFF
+   */
+  GstClockTime presentation_offset;
+};
+
+typedef struct {
+  guint8 data[16];
+} GstHLSKey;
+
+/**
+ * GstHLSDemux:
+ *
+ * Opaque #GstHLSDemux data structure.
+ */
+struct _GstHLSDemux2
+{
+  GstAdaptiveDemux parent;
+
+  /* Initial bitrate to use before any bandwidth measurement */
+  guint start_bitrate;
+
+  /* Decryption key cache: url => GstHLSKey */
+  GHashTable *keys;
+  GMutex      keys_lock;
+
+  /* FIXME: check locking, protected automatically by manifest_lock already? */
+  /* The master playlist with the available variant streams */
+  GstHLSMasterPlaylist *master;
+
+  GstHLSVariantStream  *current_variant;
+  /* The variant to switch to */
+  GstHLSVariantStream  *pending_variant;
+
+  GstHLSDemuxStream *main_stream;
+
+  /* Time Mappings (GstHLSTimeMap) */
+  GList *mappings;
+};
+
+struct _GstHLSDemux2Class
+{
+  GstAdaptiveDemuxClass parent_class;
+};
+
+
+gchar *gst_hls_buf_to_utf8_text (GstBuffer * buf);
+
+/* Private */
+
+GstHLSParserResult gst_hlsdemux_handle_content_mpegts (GstHLSDemux       *demux,
+                                                      GstHLSDemuxStream *hls_stream,
+                                                      gboolean           draining,
+                                                      GstBuffer        **buffer);
+
+GstHLSParserResult gst_hlsdemux_handle_content_id3 (GstHLSDemux       *demux,
+                                                   GstHLSDemuxStream *hls_stream,
+                                                   gboolean           draining,
+                                                   GstBuffer        **buffer);
+
+GstHLSParserResult gst_hlsdemux_handle_content_isobmff (GstHLSDemux       *demux,
+                                                       GstHLSDemuxStream *hls_stream,
+                                                       gboolean           draining,
+                                                       GstBuffer        **buffer);
+
+GstHLSParserResult gst_hlsdemux_handle_content_webvtt (GstHLSDemux        *demux,
+                                                      GstHLSDemuxStream *hls_stream,
+                                                      gboolean           draining,
+                                                      GstBuffer        **buffer);
+
+GstHLSParserResult gst_hlsdemux_handle_internal_time (GstHLSDemux       *demux,
+                                                     GstHLSDemuxStream *hls_stream,
+                                                     GstClockTime       internal_time);
+
+GstClockTimeDiff gst_hls_internal_to_stream_time (GstHLSTimeMap *map,
+                                                 GstClockTime   internal_time);
+
+GstHLSTimeMap *gst_hls_find_time_map (GstHLSDemux * demux, gint64 dsn);
+
+GType gst_hls_demux2_get_type (void);
+GType gst_hls_demux_stream_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (hlsdemux2);
+
+G_END_DECLS
+#endif /* __GST_HLS_DEMUX_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselement.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselement.c
new file mode 100644 (file)
index 0000000..c2f2d16
--- /dev/null
@@ -0,0 +1,19 @@
+
+#ifdef HAVE_CONFIG_H
+#  include <config.h>
+#endif
+
+#include "gsthlselements.h"
+
+GST_DEBUG_CATEGORY (hls_debug);
+
+void
+hls_element_init (void)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    GST_DEBUG_CATEGORY_INIT (hls_debug, "hlsng", 0,
+        "HTTP Live Streaming (HLS) NG");
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselements.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/gsthlselements.h
new file mode 100644 (file)
index 0000000..bdc1943
--- /dev/null
@@ -0,0 +1,14 @@
+#ifndef __GST_HLS_ELEMENT_H__
+#define __GST_HLS_ELEMENT_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void hls_element_init (void);
+
+GST_DEBUG_CATEGORY_EXTERN (hls_debug);
+
+G_END_DECLS
+
+#endif /* __GST_HLS_ELEMENT_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.c
new file mode 100644 (file)
index 0000000..f976b2f
--- /dev/null
@@ -0,0 +1,2203 @@
+/* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * m3u8.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <stdlib.h>
+#include <math.h>
+#include <errno.h>
+#include <glib.h>
+#include <gmodule.h>
+#include <string.h>
+
+#include <gst/pbutils/pbutils.h>
+#include "m3u8.h"
+#include "gstadaptivedemux.h"
+#include "gsthlselements.h"
+
+#define GST_CAT_DEFAULT hls_debug
+
+static void gst_m3u8_init_file_unref (GstM3U8InitFile * self);
+static gchar *uri_join (const gchar * uri, const gchar * path);
+
+GstHLSMediaPlaylist *
+gst_hls_media_playlist_ref (GstHLSMediaPlaylist * m3u8)
+{
+  g_assert (m3u8 != NULL && m3u8->ref_count > 0);
+
+  g_atomic_int_add (&m3u8->ref_count, 1);
+  return m3u8;
+}
+
+void
+gst_hls_media_playlist_unref (GstHLSMediaPlaylist * self)
+{
+  g_return_if_fail (self != NULL && self->ref_count > 0);
+
+  if (g_atomic_int_dec_and_test (&self->ref_count)) {
+    g_free (self->uri);
+    g_free (self->base_uri);
+
+    g_ptr_array_free (self->segments, TRUE);
+
+    g_free (self->last_data);
+    g_mutex_clear (&self->lock);
+    g_free (self);
+  }
+}
+
+static GstM3U8MediaSegment *
+gst_m3u8_media_segment_new (gchar * uri, gchar * title, GstClockTime duration,
+    gint64 sequence, gint64 discont_sequence)
+{
+  GstM3U8MediaSegment *file;
+
+  file = g_new0 (GstM3U8MediaSegment, 1);
+  file->uri = uri;
+  file->title = title;
+  file->duration = duration;
+  file->sequence = sequence;
+  file->discont_sequence = discont_sequence;
+  file->ref_count = 1;
+
+  file->stream_time = GST_CLOCK_STIME_NONE;
+
+  return file;
+}
+
+GstM3U8MediaSegment *
+gst_m3u8_media_segment_ref (GstM3U8MediaSegment * mfile)
+{
+  g_assert (mfile != NULL && mfile->ref_count > 0);
+
+  g_atomic_int_add (&mfile->ref_count, 1);
+  return mfile;
+}
+
+void
+gst_m3u8_media_segment_unref (GstM3U8MediaSegment * self)
+{
+  g_return_if_fail (self != NULL && self->ref_count > 0);
+
+  if (g_atomic_int_dec_and_test (&self->ref_count)) {
+    if (self->init_file)
+      gst_m3u8_init_file_unref (self->init_file);
+    g_free (self->title);
+    g_free (self->uri);
+    g_free (self->key);
+    if (self->datetime)
+      g_date_time_unref (self->datetime);
+    g_free (self);
+  }
+}
+
+static GstM3U8InitFile *
+gst_m3u8_init_file_new (gchar * uri)
+{
+  GstM3U8InitFile *file;
+
+  file = g_new0 (GstM3U8InitFile, 1);
+  file->uri = uri;
+  file->ref_count = 1;
+
+  return file;
+}
+
+static GstM3U8InitFile *
+gst_m3u8_init_file_ref (GstM3U8InitFile * ifile)
+{
+  g_assert (ifile != NULL && ifile->ref_count > 0);
+
+  g_atomic_int_add (&ifile->ref_count, 1);
+  return ifile;
+}
+
+static void
+gst_m3u8_init_file_unref (GstM3U8InitFile * self)
+{
+  g_return_if_fail (self != NULL && self->ref_count > 0);
+
+  if (g_atomic_int_dec_and_test (&self->ref_count)) {
+    g_free (self->uri);
+    g_free (self);
+  }
+}
+
+static gboolean
+int_from_string (gchar * ptr, gchar ** endptr, gint * val)
+{
+  gchar *end;
+  gint64 ret;
+
+  g_return_val_if_fail (ptr != NULL, FALSE);
+  g_return_val_if_fail (val != NULL, FALSE);
+
+  errno = 0;
+  ret = g_ascii_strtoll (ptr, &end, 10);
+  if ((errno == ERANGE && (ret == G_MAXINT64 || ret == G_MININT64))
+      || (errno != 0 && ret == 0)) {
+    GST_WARNING ("%s", g_strerror (errno));
+    return FALSE;
+  }
+
+  if (ret > G_MAXINT || ret < G_MININT) {
+    GST_WARNING ("%s", g_strerror (ERANGE));
+    return FALSE;
+  }
+
+  if (endptr)
+    *endptr = end;
+
+  *val = (gint) ret;
+
+  return end != ptr;
+}
+
+static gboolean
+int64_from_string (gchar * ptr, gchar ** endptr, gint64 * val)
+{
+  gchar *end;
+  gint64 ret;
+
+  g_return_val_if_fail (ptr != NULL, FALSE);
+  g_return_val_if_fail (val != NULL, FALSE);
+
+  errno = 0;
+  ret = g_ascii_strtoll (ptr, &end, 10);
+  if ((errno == ERANGE && (ret == G_MAXINT64 || ret == G_MININT64))
+      || (errno != 0 && ret == 0)) {
+    GST_WARNING ("%s", g_strerror (errno));
+    return FALSE;
+  }
+
+  if (endptr)
+    *endptr = end;
+
+  *val = ret;
+
+  return end != ptr;
+}
+
+static gboolean
+double_from_string (gchar * ptr, gchar ** endptr, gdouble * val)
+{
+  gchar *end;
+  gdouble ret;
+
+  g_return_val_if_fail (ptr != NULL, FALSE);
+  g_return_val_if_fail (val != NULL, FALSE);
+
+  errno = 0;
+  ret = g_ascii_strtod (ptr, &end);
+  if ((errno == ERANGE && (ret == HUGE_VAL || ret == -HUGE_VAL))
+      || (errno != 0 && ret == 0)) {
+    GST_WARNING ("%s", g_strerror (errno));
+    return FALSE;
+  }
+
+  if (!isfinite (ret)) {
+    GST_WARNING ("%s", g_strerror (ERANGE));
+    return FALSE;
+  }
+
+  if (endptr)
+    *endptr = end;
+
+  *val = (gdouble) ret;
+
+  return end != ptr;
+}
+
+static gboolean
+parse_attributes (gchar ** ptr, gchar ** a, gchar ** v)
+{
+  gchar *end = NULL, *p, *ve;
+
+  g_return_val_if_fail (ptr != NULL, FALSE);
+  g_return_val_if_fail (*ptr != NULL, FALSE);
+  g_return_val_if_fail (a != NULL, FALSE);
+  g_return_val_if_fail (v != NULL, FALSE);
+
+  /* [attribute=value,]* */
+
+  *a = *ptr;
+  end = p = g_utf8_strchr (*ptr, -1, ',');
+  if (end) {
+    gchar *q = g_utf8_strchr (*ptr, -1, '"');
+    if (q && q < end) {
+      /* special case, such as CODECS="avc1.77.30, mp4a.40.2" */
+      q = g_utf8_next_char (q);
+      if (q) {
+        q = g_utf8_strchr (q, -1, '"');
+      }
+      if (q) {
+        end = p = g_utf8_strchr (q, -1, ',');
+      }
+    }
+  }
+  if (end) {
+    do {
+      end = g_utf8_next_char (end);
+    } while (end && *end == ' ');
+    *p = '\0';
+  }
+
+  *v = p = g_utf8_strchr (*ptr, -1, '=');
+  if (*v) {
+    *p = '\0';
+    *v = g_utf8_next_char (*v);
+    if (**v == '"') {
+      ve = g_utf8_next_char (*v);
+      if (ve) {
+        ve = g_utf8_strchr (ve, -1, '"');
+      }
+      if (ve) {
+        *v = g_utf8_next_char (*v);
+        *ve = '\0';
+      } else {
+        GST_WARNING ("Cannot remove quotation marks from %s", *a);
+      }
+    }
+  } else {
+    GST_WARNING ("missing = after attribute");
+    return FALSE;
+  }
+
+  *ptr = end;
+  return TRUE;
+}
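+
+/* Example of the quoting rule handled above: for the attribute list
+ *
+ *   BANDWIDTH=1280000,CODECS="avc1.77.30, mp4a.40.2",RESOLUTION=1280x720
+ *
+ * successive calls yield ("BANDWIDTH", "1280000"), then
+ * ("CODECS", "avc1.77.30, mp4a.40.2") -- the comma inside the quotes is not
+ * treated as a separator -- and finally ("RESOLUTION", "1280x720"), after
+ * which *ptr is NULL and the caller's loop stops. */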
+
+GstHLSMediaPlaylist *
+gst_hls_media_playlist_new (const gchar * uri, const gchar * base_uri)
+{
+  GstHLSMediaPlaylist *m3u8;
+
+  m3u8 = g_new0 (GstHLSMediaPlaylist, 1);
+
+  m3u8->uri = g_strdup (uri);
+  m3u8->base_uri = g_strdup (base_uri);
+
+  m3u8->version = 1;
+  m3u8->type = GST_HLS_PLAYLIST_TYPE_UNDEFINED;
+  m3u8->targetduration = GST_CLOCK_TIME_NONE;
+  m3u8->media_sequence = 0;
+  m3u8->discont_sequence = 0;
+  m3u8->endlist = FALSE;
+  m3u8->i_frame = FALSE;
+  m3u8->allowcache = TRUE;
+
+  m3u8->ext_x_key_present = FALSE;
+  m3u8->ext_x_pdt_present = FALSE;
+
+  m3u8->segments =
+      g_ptr_array_new_full (16, (GDestroyNotify) gst_m3u8_media_segment_unref);
+
+  m3u8->duration = 0;
+
+  g_mutex_init (&m3u8->lock);
+  m3u8->ref_count = 1;
+
+  return m3u8;
+}
+
+void
+gst_hls_media_playlist_dump (GstHLSMediaPlaylist * self)
+{
+#ifndef GST_DISABLE_GST_DEBUG
+  guint idx;
+  gchar *datestring;
+
+  GST_DEBUG ("uri              : %s", self->uri);
+  GST_DEBUG ("base_uri         : %s", self->base_uri);
+
+  GST_DEBUG ("version          : %d", self->version);
+
+  GST_DEBUG ("targetduration   : %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (self->targetduration));
+  GST_DEBUG ("media_sequence   : %" G_GINT64_FORMAT, self->media_sequence);
+  GST_DEBUG ("discont_sequence : %" G_GINT64_FORMAT, self->discont_sequence);
+
+  GST_DEBUG ("endlist          : %s",
+      self->endlist ? "present" : "NOT present");
+  GST_DEBUG ("i_frame          : %s", self->i_frame ? "YES" : "NO");
+
+  GST_DEBUG ("EXT-X-KEY        : %s",
+      self->ext_x_key_present ? "present" : "NOT present");
+  GST_DEBUG ("EXT-X-PROGRAM-DATE-TIME : %s",
+      self->ext_x_pdt_present ? "present" : "NOT present");
+
+  GST_DEBUG ("duration         : %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (self->duration));
+
+  GST_DEBUG ("Segments : %d", self->segments->len);
+  for (idx = 0; idx < self->segments->len; idx++) {
+    GstM3U8MediaSegment *segment = g_ptr_array_index (self->segments, idx);
+
+    GST_DEBUG ("  sequence:%" G_GINT64_FORMAT " discont_sequence:%"
+        G_GINT64_FORMAT, segment->sequence, segment->discont_sequence);
+    GST_DEBUG ("    stream_time : %" GST_STIME_FORMAT,
+        GST_STIME_ARGS (segment->stream_time));
+    GST_DEBUG ("    duration    :  %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (segment->duration));
+    if (segment->title)
+      GST_DEBUG ("    title       : %s", segment->title);
+    GST_DEBUG ("    discont     : %s", segment->discont ? "YES" : "NO");
+    if (segment->datetime) {
+      datestring = g_date_time_format_iso8601 (segment->datetime);
+      GST_DEBUG ("    date/time    : %s", datestring);
+      g_free (datestring);
+    }
+    GST_DEBUG ("    uri         : %s %" G_GUINT64_FORMAT " %" G_GINT64_FORMAT,
+        segment->uri, segment->offset, segment->size);
+  }
+#endif
+}
+
+static void
+gst_hls_media_playlist_postprocess_pdt (GstHLSMediaPlaylist * self)
+{
+  gint idx, len = self->segments->len;
+  gint first_pdt = -1;
+  GstM3U8MediaSegment *previous = NULL;
+  GstM3U8MediaSegment *segment = NULL;
+
+  /* Iterate forward, and make sure datetimes are coherent */
+  for (idx = 0; idx < len; idx++, previous = segment) {
+    segment = g_ptr_array_index (self->segments, idx);
+
+#define ABSDIFF(a,b) ((a) > (b) ? (a) - (b) : (b) - (a))
+
+    if (segment->datetime) {
+      if (first_pdt == -1)
+        first_pdt = idx;
+      if (!segment->discont && previous && previous->datetime) {
+        GstClockTimeDiff diff = g_date_time_difference (segment->datetime,
+            previous->datetime) * GST_USECOND;
+        if (ABSDIFF (diff, previous->duration) > 500 * GST_MSECOND) {
+          GST_LOG ("PDT diff %" GST_STIME_FORMAT " previous duration %"
+              GST_TIME_FORMAT, GST_STIME_ARGS (diff),
+              GST_TIME_ARGS (previous->duration));
+          g_date_time_unref (segment->datetime);
+          segment->datetime =
+              g_date_time_add (previous->datetime,
+              previous->duration / GST_USECOND);
+        }
+      }
+    } else {
+      if (segment->discont) {
+        GST_WARNING ("Discont segment doesn't have a PDT!");
+      } else if (previous) {
+        if (previous->datetime) {
+          segment->datetime =
+              g_date_time_add (previous->datetime,
+              previous->duration / GST_USECOND);
+          GST_LOG
+              ("Generated new PDT based on previous segment PDT and duration");
+        } else {
+          GST_LOG ("Missing PDT, but can't generate it from previous one");
+        }
+      }
+    }
+  }
+
+  if (first_pdt != -1 && first_pdt != 0) {
+    GST_LOG ("Scanning backwards from %d", first_pdt);
+    previous = g_ptr_array_index (self->segments, first_pdt);
+    for (idx = first_pdt - 1; idx >= 0; idx = idx - 1) {
+      GST_LOG ("%d", idx);
+      segment = g_ptr_array_index (self->segments, idx);
+      if (!segment->datetime && previous->datetime) {
+        segment->datetime =
+            g_date_time_add (previous->datetime,
+            -(segment->duration / GST_USECOND));
+      }
+      previous = segment;
+    }
+  }
+}
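+
+/* Worked example of the coherence check above: if the previous segment has a
+ * PDT of 10:00:00.000 and a 4 second duration, the expected PDT of the next
+ * (non-discont) segment is 10:00:04.000.  Should the playlist advertise
+ * 10:00:09.000 instead (5 seconds off, well beyond the 500ms tolerance), that
+ * value is discarded and 10:00:04.000 is used.  Segments without any PDT are
+ * filled in the same way: forward from the previous segment, and backward
+ * from the first segment that does carry one. */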
+
+/* Parse and create a new GstHLSMediaPlaylist */
+GstHLSMediaPlaylist *
+gst_hls_media_playlist_parse (gchar * data, const gchar * uri,
+    const gchar * base_uri)
+{
+  gchar *input_data = data;
+  GstHLSMediaPlaylist *self;
+  gint val;
+  GstClockTime duration;
+  gchar *title, *end;
+  gboolean discontinuity = FALSE;
+  gchar *current_key = NULL;
+  gboolean have_iv = FALSE;
+  guint8 iv[16] = { 0, };
+  gint64 size = -1, offset = -1;
+  gint64 mediasequence = 0;
+  gint64 dsn = 0;
+  GDateTime *date_time = NULL;
+  GstM3U8InitFile *last_init_file = NULL;
+  GstM3U8MediaSegment *previous = NULL;
+
+  GST_LOG ("uri: %s", uri);
+  GST_LOG ("base_uri: %s", base_uri);
+  GST_TRACE ("data:\n%s", data);
+
+  if (!g_str_has_prefix (data, "#EXTM3U")) {
+    GST_WARNING ("Data doesn't start with #EXTM3U");
+    g_free (data);
+    return NULL;
+  }
+
+  if (g_strrstr (data, "\n#EXT-X-STREAM-INF:") != NULL) {
+    GST_WARNING ("Not a media playlist, but a master playlist!");
+    g_free (data);
+    return NULL;
+  }
+
+  self = gst_hls_media_playlist_new (uri, base_uri);
+
+  /* Store a copy of the data */
+  self->last_data = g_strdup (data);
+
+  duration = 0;
+  title = NULL;
+  data += 7;
+  while (TRUE) {
+    gchar *r;
+
+    end = g_utf8_strchr (data, -1, '\n');
+    if (end)
+      *end = '\0';
+
+    r = g_utf8_strchr (data, -1, '\r');
+    if (r)
+      *r = '\0';
+
+    if (data[0] != '#' && data[0] != '\0') {
+      if (duration <= 0) {
+        GST_LOG ("%s: got line without EXTINF, dropping", data);
+        goto next_line;
+      }
+
+      data = uri_join (self->base_uri ? self->base_uri : self->uri, data);
+
+      /* Let's check this is not a bogus duplicate entry */
+      if (previous && !discontinuity && !g_strcmp0 (data, previous->uri)
+          && (offset == -1 || previous->offset == offset)) {
+        GST_WARNING ("Dropping duplicate segment entry");
+        g_free (data);
+        data = NULL;
+        date_time = NULL;
+        duration = 0;
+        title = NULL;
+        discontinuity = FALSE;
+        size = offset = -1;
+        goto next_line;
+      }
+      if (data != NULL) {
+        GstM3U8MediaSegment *file;
+        /* We can finally create the segment */
+        /* The discontinuity sequence number is only stored if the header has
+         * EXT-X-DISCONTINUITY-SEQUENCE present. */
+        file =
+            gst_m3u8_media_segment_new (data, title, duration, mediasequence++,
+            dsn);
+        self->duration += duration;
+
+        /* set encryption params */
+        file->key = current_key ? g_strdup (current_key) : NULL;
+        if (file->key) {
+          if (have_iv) {
+            memcpy (file->iv, iv, sizeof (iv));
+          } else {
+            guint8 *iv = file->iv + 12;
+            GST_WRITE_UINT32_BE (iv, file->sequence);
+          }
+        }
+
+        if (size != -1) {
+          file->size = size;
+          if (offset != -1) {
+            file->offset = offset;
+          } else {
+            file->offset = 0;
+          }
+        } else {
+          file->size = -1;
+          file->offset = 0;
+        }
+
+        file->datetime = date_time;
+        file->discont = discontinuity;
+        if (last_init_file)
+          file->init_file = gst_m3u8_init_file_ref (last_init_file);
+
+        date_time = NULL;
+        duration = 0;
+        title = NULL;
+        discontinuity = FALSE;
+        size = offset = -1;
+        g_ptr_array_add (self->segments, file);
+        previous = file;
+      }
+
+    } else if (g_str_has_prefix (data, "#EXTINF:")) {
+      gdouble fval;
+      if (!double_from_string (data + 8, &data, &fval)) {
+        GST_WARNING ("Can't read EXTINF duration");
+        goto next_line;
+      }
+      duration = fval * (gdouble) GST_SECOND;
+      if (self->targetduration > 0 && duration > self->targetduration) {
+        GST_DEBUG ("EXTINF duration (%" GST_TIME_FORMAT
+            ") > TARGETDURATION (%" GST_TIME_FORMAT ")",
+            GST_TIME_ARGS (duration), GST_TIME_ARGS (self->targetduration));
+      }
+      if (!data || *data != ',')
+        goto next_line;
+      data = g_utf8_next_char (data);
+      if (data != end) {
+        g_free (title);
+        title = g_strdup (data);
+      }
+    } else if (g_str_has_prefix (data, "#EXT-X-")) {
+      gchar *data_ext_x = data + 7;
+
+      /* All these entries start with #EXT-X- */
+      if (g_str_has_prefix (data_ext_x, "ENDLIST")) {
+        self->endlist = TRUE;
+      } else if (g_str_has_prefix (data_ext_x, "VERSION:")) {
+        if (int_from_string (data + 15, &data, &val))
+          self->version = val;
+      } else if (g_str_has_prefix (data_ext_x, "PLAYLIST-TYPE:")) {
+        if (!g_strcmp0 (data + 21, "VOD"))
+          self->type = GST_HLS_PLAYLIST_TYPE_VOD;
+        else if (!g_strcmp0 (data + 21, "EVENT"))
+          self->type = GST_HLS_PLAYLIST_TYPE_EVENT;
+        else
+          GST_WARNING ("Unknown playlist type '%s'", data + 21);
+      } else if (g_str_has_prefix (data_ext_x, "TARGETDURATION:")) {
+        if (int_from_string (data + 22, &data, &val))
+          self->targetduration = val * GST_SECOND;
+      } else if (g_str_has_prefix (data_ext_x, "MEDIA-SEQUENCE:")) {
+        if (int_from_string (data + 22, &data, &val))
+          self->media_sequence = mediasequence = val;
+      } else if (g_str_has_prefix (data_ext_x, "DISCONTINUITY-SEQUENCE:")) {
+        if (int_from_string (data + 30, &data, &val)
+            && val != self->discont_sequence) {
+          dsn = self->discont_sequence = val;
+          self->has_ext_x_dsn = TRUE;
+        }
+      } else if (g_str_has_prefix (data_ext_x, "DISCONTINUITY")) {
+        dsn++;
+        discontinuity = TRUE;
+      } else if (g_str_has_prefix (data_ext_x, "PROGRAM-DATE-TIME:")) {
+        date_time = g_date_time_new_from_iso8601 (data + 25, NULL);
+        if (date_time)
+          self->ext_x_pdt_present = TRUE;
+      } else if (g_str_has_prefix (data_ext_x, "ALLOW-CACHE:")) {
+        self->allowcache = g_ascii_strcasecmp (data + 19, "YES") == 0;
+      } else if (g_str_has_prefix (data_ext_x, "KEY:")) {
+        gchar *v, *a;
+
+        data = data + 11;
+
+        /* IV and KEY are only valid until the next #EXT-X-KEY */
+        have_iv = FALSE;
+        g_free (current_key);
+        current_key = NULL;
+        while (data && parse_attributes (&data, &a, &v)) {
+          if (g_str_equal (a, "URI")) {
+            current_key =
+                uri_join (self->base_uri ? self->base_uri : self->uri, v);
+          } else if (g_str_equal (a, "IV")) {
+            gchar *ivp = v;
+            gint i;
+
+            if (strlen (ivp) < 32 + 2 || (!g_str_has_prefix (ivp, "0x")
+                    && !g_str_has_prefix (ivp, "0X"))) {
+              GST_WARNING ("Can't read IV");
+              continue;
+            }
+
+            ivp += 2;
+            for (i = 0; i < 16; i++) {
+              gint h, l;
+
+              h = g_ascii_xdigit_value (*ivp);
+              ivp++;
+              l = g_ascii_xdigit_value (*ivp);
+              ivp++;
+              if (h == -1 || l == -1) {
+                i = -1;
+                break;
+              }
+              iv[i] = (h << 4) | l;
+            }
+
+            if (i == -1) {
+              GST_WARNING ("Can't read IV");
+              continue;
+            }
+            have_iv = TRUE;
+          } else if (g_str_equal (a, "METHOD")) {
+            if (!g_str_equal (v, "AES-128") && !g_str_equal (v, "NONE")) {
+              GST_WARNING ("Encryption method %s not supported", v);
+              continue;
+            }
+            self->ext_x_key_present = TRUE;
+          }
+        }
+      } else if (g_str_has_prefix (data_ext_x, "BYTERANGE:")) {
+        gchar *v = data + 17;
+
+        size = -1;
+        offset = -1;
+        if (int64_from_string (v, &v, &size)) {
+          if (*v == '@' && !int64_from_string (v + 1, &v, &offset))
+            goto next_line;
+          /* No explicit offset: continue right after the previous segment's range */
+          if (offset == -1 && previous)
+            offset = previous->offset + previous->size;
+        } else {
+          goto next_line;
+        }
+      } else if (g_str_has_prefix (data_ext_x, "MAP:")) {
+        gchar *v, *a, *header_uri = NULL;
+
+        data = data + 11;
+
+        while (data != NULL && parse_attributes (&data, &a, &v)) {
+          if (strcmp (a, "URI") == 0) {
+            header_uri =
+                uri_join (self->base_uri ? self->base_uri : self->uri, v);
+          } else if (strcmp (a, "BYTERANGE") == 0) {
+            if (int64_from_string (v, &v, &size)) {
+              if (*v == '@' && !int64_from_string (v + 1, &v, &offset)) {
+                g_free (header_uri);
+                goto next_line;
+              }
+            } else {
+              g_free (header_uri);
+              goto next_line;
+            }
+          }
+        }
+
+        if (header_uri) {
+          GstM3U8InitFile *init_file;
+          init_file = gst_m3u8_init_file_new (header_uri);
+
+          if (size != -1) {
+            init_file->size = size;
+            if (offset != -1)
+              init_file->offset = offset;
+            else
+              init_file->offset = 0;
+          } else {
+            init_file->size = -1;
+            init_file->offset = 0;
+          }
+          if (last_init_file)
+            gst_m3u8_init_file_unref (last_init_file);
+
+          last_init_file = init_file;
+        }
+      } else {
+        GST_LOG ("Ignored line: %s", data);
+      }
+    } else if (data[0]) {
+      /* Log non-empty lines */
+      GST_LOG ("Ignored line: `%s`", data);
+    }
+
+  next_line:
+    if (!end)
+      break;
+    data = g_utf8_next_char (end);      /* skip \n */
+  }
+
+  g_free (current_key);
+  current_key = NULL;
+
+  g_free (input_data);
+
+  if (last_init_file)
+    gst_m3u8_init_file_unref (last_init_file);
+
+  if (self->segments->len == 0) {
+    GST_ERROR ("Invalid media playlist, it does not contain any media files");
+    gst_hls_media_playlist_unref (self);
+    return NULL;
+  }
+
+  /* Now go over the parsed data to ensure MSN and/or PDT are set */
+  if (self->ext_x_pdt_present)
+    gst_hls_media_playlist_postprocess_pdt (self);
+
+  /* If we are not live, the stream time can be directly applied */
+  if (!GST_HLS_MEDIA_PLAYLIST_IS_LIVE (self)) {
+    gint iter, len = self->segments->len;
+    GstClockTimeDiff stream_time = 0;
+
+    for (iter = 0; iter < len; iter++) {
+      GstM3U8MediaSegment *segment = g_ptr_array_index (self->segments, iter);
+      segment->stream_time = stream_time;
+      stream_time += segment->duration;
+    }
+  }
+
+  gst_hls_media_playlist_dump (self);
+  return self;
+}
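+
+/* Minimal usage sketch (illustrative; the variable names are hypothetical and
+ * the download of the playlist text is assumed to have happened elsewhere):
+ *
+ *   gchar *data = g_strdup (downloaded_text);   // parse() takes ownership
+ *   GstHLSMediaPlaylist *pl =
+ *       gst_hls_media_playlist_parse (data, "https://example.com/media.m3u8",
+ *           NULL);
+ *   if (pl) {
+ *     GstClockTime duration = gst_hls_media_playlist_get_duration (pl);
+ *     ...
+ *     gst_hls_media_playlist_unref (pl);
+ *   }
+ */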
+
+/* Returns TRUE if the m3u8 has the same data as playlist_data */
+gboolean
+gst_hls_media_playlist_has_same_data (GstHLSMediaPlaylist * self,
+    gchar * playlist_data)
+{
+  gboolean ret;
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (self);
+
+  ret = self->last_data && g_str_equal (self->last_data, playlist_data);
+
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (self);
+
+  return ret;
+}
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_seek (GstHLSMediaPlaylist * playlist, gboolean forward,
+    GstSeekFlags flags, GstClockTimeDiff ts)
+{
+  gboolean snap_nearest =
+      (flags & GST_SEEK_FLAG_SNAP_NEAREST) == GST_SEEK_FLAG_SNAP_NEAREST;
+  gboolean snap_after =
+      (flags & GST_SEEK_FLAG_SNAP_AFTER) == GST_SEEK_FLAG_SNAP_AFTER;
+  guint idx;
+  GstM3U8MediaSegment *res = NULL;
+
+  GST_DEBUG ("ts:%" GST_STIME_FORMAT " forward:%d playlist uri: %s",
+      GST_STIME_ARGS (ts), forward, playlist->uri);
+
+  for (idx = 0; idx < playlist->segments->len; idx++) {
+    GstM3U8MediaSegment *cand = g_ptr_array_index (playlist->segments, idx);
+
+    if ((forward && snap_after) || snap_nearest) {
+      if (cand->stream_time >= ts ||
+          (snap_nearest && (ts - cand->stream_time < cand->duration / 2))) {
+        res = cand;
+        goto out;
+      }
+    } else if (!forward && snap_after) {
+      GstClockTime next_pos = cand->stream_time + cand->duration;
+
+      if (next_pos <= ts && ts < next_pos + cand->duration) {
+        res = cand;
+        goto out;
+      }
+    } else if ((cand->stream_time <= ts || idx == 0)
+        && ts < cand->stream_time + cand->duration) {
+      res = cand;
+      goto out;
+    }
+  }
+
+out:
+  if (res) {
+    GST_DEBUG ("Returning segment sn:%" G_GINT64_FORMAT " stream_time:%"
+        GST_STIME_FORMAT " duration:%" GST_TIME_FORMAT, res->sequence,
+        GST_STIME_ARGS (res->stream_time), GST_TIME_ARGS (res->duration));
+    gst_m3u8_media_segment_ref (res);
+  } else {
+    GST_DEBUG ("Couldn't find a match");
+  }
+
+  return res;
+}
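+
+/* Usage sketch (illustrative): find the segment covering the 30 second mark,
+ * snapping to the nearest segment boundary:
+ *
+ *   GstM3U8MediaSegment *seg =
+ *       gst_hls_media_playlist_seek (playlist, TRUE,
+ *           GST_SEEK_FLAG_SNAP_NEAREST, 30 * GST_SECOND);
+ *   if (seg) {
+ *     ... use seg->stream_time, seg->uri, ...
+ *     gst_m3u8_media_segment_unref (seg);
+ *   }
+ */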
+
+/* Recalculate all segment DSN based on the DSN of the provided anchor segment
+ * (which must belong to the playlist). */
+static void
+gst_hls_media_playlist_recalculate_dsn (GstHLSMediaPlaylist * playlist,
+    GstM3U8MediaSegment * anchor)
+{
+  guint idx;
+  gint iter;
+  GstM3U8MediaSegment *cand, *prev;
+
+  if (!g_ptr_array_find (playlist->segments, anchor, &idx)) {
+    g_assert (FALSE);
+  }
+
+  g_assert (idx != -1);
+
+  GST_DEBUG ("Re-calculating DSN from segment #%d %" G_GINT64_FORMAT,
+      idx, anchor->discont_sequence);
+
+  /* Forward */
+  prev = anchor;
+  for (iter = idx + 1; iter < playlist->segments->len; iter++) {
+    cand = g_ptr_array_index (playlist->segments, iter);
+    if (cand->discont)
+      cand->discont_sequence = prev->discont_sequence + 1;
+    else
+      cand->discont_sequence = prev->discont_sequence;
+    prev = cand;
+  }
+
+  /* Backward */
+  prev = anchor;
+  for (iter = idx - 1; iter >= 0; iter--) {
+    cand = g_ptr_array_index (playlist->segments, iter);
+    if (prev->discont)
+      cand->discont_sequence = prev->discont_sequence - 1;
+    else
+      cand->discont_sequence = prev->discont_sequence;
+    prev = cand;
+  }
+}
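+
+/* Worked example: with the anchor carrying DSN 7, every following segment
+ * keeps DSN 7 until one flagged "discont" bumps it (and all segments after
+ * it) to 8.  Walking backwards, each segment inherits the DSN of its
+ * successor, unless that successor is flagged "discont", in which case it
+ * gets one less (6 for the segment right before the anchor, if the anchor
+ * itself is a discont). */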
+
+
+/* Recalculate all segment stream time based on the stream time of the provided
+ * anchor segment (which must belong to the playlist) */
+void
+gst_hls_media_playlist_recalculate_stream_time (GstHLSMediaPlaylist * playlist,
+    GstM3U8MediaSegment * anchor)
+{
+  guint idx;
+  gint iter;
+  GstM3U8MediaSegment *cand, *prev;
+
+  if (!g_ptr_array_find (playlist->segments, anchor, &idx)) {
+    g_assert (FALSE);
+  }
+
+  g_assert (GST_CLOCK_TIME_IS_VALID (anchor->stream_time));
+  g_assert (idx != -1);
+
+  GST_DEBUG ("Re-calculating stream times from segment #%d %" GST_TIME_FORMAT,
+      idx, GST_TIME_ARGS (anchor->stream_time));
+
+  /* Forward */
+  prev = anchor;
+  for (iter = idx + 1; iter < playlist->segments->len; iter++) {
+    cand = g_ptr_array_index (playlist->segments, iter);
+    cand->stream_time = prev->stream_time + prev->duration;
+    GST_DEBUG ("Forward iter %d %" GST_STIME_FORMAT, iter,
+        GST_STIME_ARGS (cand->stream_time));
+    prev = cand;
+  }
+
+  /* Backward */
+  prev = anchor;
+  for (iter = idx - 1; iter >= 0; iter--) {
+    cand = g_ptr_array_index (playlist->segments, iter);
+    cand->stream_time = prev->stream_time - cand->duration;
+    GST_DEBUG ("Backward iter %d %" GST_STIME_FORMAT, iter,
+        GST_STIME_ARGS (cand->stream_time));
+    prev = cand;
+  }
+}
+
+/* If a segment with the same URI, size, offset, SN and DSN is present in the
+ * playlist, returns that one */
+static GstM3U8MediaSegment *
+gst_hls_media_playlist_find_by_uri (GstHLSMediaPlaylist * playlist,
+    GstM3U8MediaSegment * segment)
+{
+  guint idx;
+
+  for (idx = 0; idx < playlist->segments->len; idx++) {
+    GstM3U8MediaSegment *cand = g_ptr_array_index (playlist->segments, idx);
+
+    if (cand->sequence == segment->sequence &&
+        cand->discont_sequence == segment->discont_sequence &&
+        cand->offset == segment->offset && cand->size == segment->size &&
+        !g_strcmp0 (cand->uri, segment->uri)) {
+      return cand;
+    }
+  }
+
+  return NULL;
+}
+
+/* Given a media segment (potentially from another media playlist), find the
+ * equivalent media segment in this playlist.
+ *
+ * This will also recalculate all stream times based on that segment stream
+ * time (i.e. "sync" the playlist to that previous time).
+ *
+ * If an equivalent/identical one is found it is returned with
+ * the reference count incremented
+ *
+ * If the reference segment is *just* before the 1st segment in the playlist, it
+ * will be inserted and returned. This allows coping with non-overlapping (but
+ * contiguous) playlist updates.
+ */
+GstM3U8MediaSegment *
+gst_hls_media_playlist_sync_to_segment (GstHLSMediaPlaylist * playlist,
+    GstM3U8MediaSegment * segment)
+{
+  guint idx = G_MAXUINT;
+  GstM3U8MediaSegment *res = NULL;
+#ifndef GST_DISABLE_GST_DEBUG
+  gchar *pdtstring;
+#endif
+
+  g_return_val_if_fail (playlist, NULL);
+  g_return_val_if_fail (segment, NULL);
+
+  GST_DEBUG ("Re-syncing to segment %" GST_STIME_FORMAT " duration:%"
+      GST_TIME_FORMAT " sn:%" G_GINT64_FORMAT "/dsn:%" G_GINT64_FORMAT
+      " uri:%s in playlist %s", GST_STIME_ARGS (segment->stream_time),
+      GST_TIME_ARGS (segment->duration), segment->sequence,
+      segment->discont_sequence, segment->uri, playlist->uri);
+
+  /* The easy one. Happens when stream times need to be re-synced in an existing
+   * playlist */
+  if (g_ptr_array_find (playlist->segments, segment, NULL)) {
+    GST_DEBUG ("Present as-is in playlist");
+    res = segment;
+    goto out;
+  }
+
+  /* If there is an identical segment with the same URI and SN, use that one */
+  res = gst_hls_media_playlist_find_by_uri (playlist, segment);
+  if (res) {
+    GST_DEBUG ("Using same URI/DSN/SN match");
+    goto out;
+  }
+
+  /* Try with PDT */
+  if (segment->datetime && playlist->ext_x_pdt_present) {
+#ifndef GST_DISABLE_GST_DEBUG
+    pdtstring = g_date_time_format_iso8601 (segment->datetime);
+    GST_DEBUG ("Search by datetime for %s", pdtstring);
+    g_free (pdtstring);
+#endif
+    for (idx = 0; idx < playlist->segments->len; idx++) {
+      GstM3U8MediaSegment *cand = g_ptr_array_index (playlist->segments, idx);
+
+      if (idx == 0 && cand->datetime) {
+        /* Special case for segments which are just before the 1st one (within
+         * 20ms). We add another reference because it now also belongs to the
+         * current playlist */
+        GDateTime *seg_end = g_date_time_add (segment->datetime,
+            segment->duration / GST_USECOND);
+        GstClockTimeDiff ddiff =
+            g_date_time_difference (cand->datetime, seg_end) * GST_USECOND;
+        g_date_time_unref (seg_end);
+        if (ABS (ddiff) < 20 * GST_MSECOND) {
+          /* The reference segment ends within 20ms of the first segment, so it sits just before it */
+          GST_DEBUG ("Reference segment ends within %" GST_STIME_FORMAT
+              " of first playlist segment, inserting before",
+              GST_STIME_ARGS (ddiff));
+          g_ptr_array_insert (playlist->segments, 0,
+              gst_m3u8_media_segment_ref (segment));
+          res = segment;
+          goto out;
+        }
+      }
+
+      if (cand->datetime
+          && g_date_time_difference (cand->datetime, segment->datetime) >= 0) {
+        res = cand;
+        goto out;
+      }
+    }
+  }
+
+  /* If not live, we can match by stream time */
+  if (!GST_HLS_MEDIA_PLAYLIST_IS_LIVE (playlist)) {
+    GST_DEBUG ("Search by Stream time for %" GST_STIME_FORMAT " duration:%"
+        GST_TIME_FORMAT, GST_STIME_ARGS (segment->stream_time),
+        GST_TIME_ARGS (segment->duration));
+    for (idx = 0; idx < playlist->segments->len; idx++) {
+      GstM3U8MediaSegment *cand = g_ptr_array_index (playlist->segments, idx);
+
+      /* If the candidate starts at or after the previous stream time */
+      if (cand->stream_time >= segment->stream_time) {
+        res = cand;
+        goto out;
+      }
+
+      /* If the previous end stream time is before the candidate end stream time */
+      if ((segment->stream_time + segment->duration) <
+          (cand->stream_time + cand->duration)) {
+        res = cand;
+        goto out;
+      }
+    }
+  }
+
+  /* Fallback with MSN */
+  GST_DEBUG ("Search by Media Sequence Number for sn:%" G_GINT64_FORMAT " dsn:%"
+      G_GINT64_FORMAT, segment->sequence, segment->discont_sequence);
+  for (idx = 0; idx < playlist->segments->len; idx++) {
+    GstM3U8MediaSegment *cand = g_ptr_array_index (playlist->segments, idx);
+
+    if (idx == 0 && cand->sequence == segment->sequence + 1) {
+      /* Special case for segments just before the 1st one. We add another
+       * reference because it now also belongs to the current playlist */
+      GST_DEBUG ("reference segment is just before 1st segment, inserting");
+      g_ptr_array_insert (playlist->segments, 0,
+          gst_m3u8_media_segment_ref (segment));
+      res = segment;
+      goto out;
+    }
+
+    if ((segment->discont_sequence == cand->discont_sequence
+            || !playlist->has_ext_x_dsn)
+        && (cand->sequence >= segment->sequence)) {
+      res = cand;
+      goto out;
+    }
+  }
+
+out:
+  /* For live playlists we re-calculate all stream times based on the existing
+   * stream time. Non-live playlists have their stream time calculated at
+   * parsing time. */
+  if (res) {
+    gst_m3u8_media_segment_ref (res);
+    if (res->stream_time == GST_CLOCK_STIME_NONE)
+      res->stream_time = segment->stream_time;
+    if (GST_HLS_MEDIA_PLAYLIST_IS_LIVE (playlist))
+      gst_hls_media_playlist_recalculate_stream_time (playlist, res);
+    /* If the playlist didn't specify a reference discont sequence number, we
+     * carry over the one from the reference segment */
+    if (!playlist->has_ext_x_dsn
+        && res->discont_sequence != segment->discont_sequence) {
+      res->discont_sequence = segment->discont_sequence;
+      gst_hls_media_playlist_recalculate_dsn (playlist, res);
+    }
+#ifndef GST_DISABLE_GST_DEBUG
+    pdtstring =
+        res->datetime ? g_date_time_format_iso8601 (res->datetime) : NULL;
+    GST_DEBUG ("Returning segment sn:%" G_GINT64_FORMAT " dsn:%" G_GINT64_FORMAT
+        " stream_time:%" GST_STIME_FORMAT " duration:%" GST_TIME_FORMAT
+        " datetime:%s", res->sequence, res->discont_sequence,
+        GST_STIME_ARGS (res->stream_time), GST_TIME_ARGS (res->duration),
+        pdtstring);
+    g_free (pdtstring);
+#endif
+  } else if (!GST_HLS_MEDIA_PLAYLIST_IS_LIVE (playlist)) {
+    GST_DEBUG ("Could not find a match");
+  }
+
+  return res;
+}
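+
+/* Typical usage sketch (illustrative, hypothetical variable names): after a
+ * live playlist refresh, re-sync the freshly parsed playlist against the
+ * segment that was being played from the previous one, so that stream times
+ * and DSN line up:
+ *
+ *   GstM3U8MediaSegment *match =
+ *       gst_hls_media_playlist_sync_to_segment (new_playlist, current_segment);
+ *   if (match) {
+ *     gst_m3u8_media_segment_unref (current_segment);
+ *     current_segment = match;   // its stream_time is now valid in new_playlist
+ *   }
+ */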
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_get_starting_segment (GstHLSMediaPlaylist * self)
+{
+  GstM3U8MediaSegment *res;
+
+  GST_DEBUG ("playlist %s", self->uri);
+
+  if (!GST_HLS_MEDIA_PLAYLIST_IS_LIVE (self)) {
+    /* For non-live, we just grab the first one */
+    res = g_ptr_array_index (self->segments, 0);
+  } else {
+    /* Live playlist */
+    res =
+        g_ptr_array_index (self->segments,
+        MAX ((gint) self->segments->len - GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE,
+            0));
+  }
+
+  if (res) {
+    GST_DEBUG ("Using segment sn:%" G_GINT64_FORMAT " dsn:%" G_GINT64_FORMAT,
+        res->sequence, res->discont_sequence);
+    gst_m3u8_media_segment_ref (res);
+  }
+
+  return res;
+}
+
+gboolean
+gst_hls_media_playlist_has_next_fragment (GstHLSMediaPlaylist * m3u8,
+    GstM3U8MediaSegment * current, gboolean forward)
+{
+  guint idx;
+  gboolean have_next = TRUE;
+
+  g_return_val_if_fail (m3u8 != NULL, FALSE);
+  g_return_val_if_fail (current != NULL, FALSE);
+
+  GST_DEBUG ("playlist %s", m3u8->uri);
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (m3u8);
+
+  if (!g_ptr_array_find (m3u8->segments, current, &idx))
+    have_next = FALSE;
+  else if (idx == 0 && !forward)
+    have_next = FALSE;
+  else if (forward && idx == (m3u8->segments->len - 1))
+    have_next = FALSE;
+
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (m3u8);
+
+  GST_DEBUG ("Returning %d", have_next);
+
+  return have_next;
+}
+
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_advance_fragment (GstHLSMediaPlaylist * m3u8,
+    GstM3U8MediaSegment * current, gboolean forward)
+{
+  GstM3U8MediaSegment *file = NULL;
+  guint idx;
+
+  g_return_val_if_fail (m3u8 != NULL, NULL);
+  g_return_val_if_fail (current != NULL, NULL);
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (m3u8);
+
+  GST_DEBUG ("playlist %s", m3u8->uri);
+
+  if (m3u8->segments->len < 2) {
+    GST_DEBUG ("Playlist only contains one fragment, can't advance");
+    goto out;
+  }
+
+  if (!g_ptr_array_find (m3u8->segments, current, &idx)) {
+    GST_ERROR ("Requested to advance from a fragment not present in the playlist");
+    goto out;
+  }
+
+  if (forward && idx < (m3u8->segments->len - 1)) {
+    file =
+        gst_m3u8_media_segment_ref (g_ptr_array_index (m3u8->segments,
+            idx + 1));
+  } else if (!forward && idx > 0) {
+    file =
+        gst_m3u8_media_segment_ref (g_ptr_array_index (m3u8->segments,
+            idx - 1));
+  }
+
+  if (file)
+    GST_DEBUG ("Advanced to segment sn:%" G_GINT64_FORMAT " dsn:%"
+        G_GINT64_FORMAT, file->sequence, file->discont_sequence);
+  else
+    GST_DEBUG ("Could not find %s fragment", forward ? "next" : "previous");
+
+out:
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (m3u8);
+
+  return file;
+}
+
+GstClockTime
+gst_hls_media_playlist_get_duration (GstHLSMediaPlaylist * m3u8)
+{
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+
+  g_return_val_if_fail (m3u8 != NULL, GST_CLOCK_TIME_NONE);
+
+  GST_DEBUG ("playlist %s", m3u8->uri);
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (m3u8);
+  /* We can only get the duration for on-demand streams */
+  if (m3u8->endlist) {
+    if (m3u8->segments->len) {
+      GstM3U8MediaSegment *first = g_ptr_array_index (m3u8->segments, 0);
+      GstM3U8MediaSegment *last =
+          g_ptr_array_index (m3u8->segments, m3u8->segments->len - 1);
+      duration = last->stream_time + last->duration - first->stream_time;
+      if (duration != m3u8->duration)
+        GST_ERROR ("difference in calculated duration? %" GST_TIME_FORMAT
+            " vs %" GST_TIME_FORMAT, GST_TIME_ARGS (duration),
+            GST_TIME_ARGS (m3u8->duration));
+    }
+    duration = m3u8->duration;
+  }
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (m3u8);
+
+  GST_DEBUG ("duration %" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
+
+  return duration;
+}
+
+gchar *
+gst_hls_media_playlist_get_uri (GstHLSMediaPlaylist * m3u8)
+{
+  gchar *uri;
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (m3u8);
+  uri = g_strdup (m3u8->uri);
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (m3u8);
+
+  return uri;
+}
+
+gboolean
+gst_hls_media_playlist_is_live (GstHLSMediaPlaylist * m3u8)
+{
+  gboolean is_live;
+
+  g_return_val_if_fail (m3u8 != NULL, FALSE);
+
+  GST_HLS_MEDIA_PLAYLIST_LOCK (m3u8);
+  is_live = GST_HLS_MEDIA_PLAYLIST_IS_LIVE (m3u8);
+  GST_HLS_MEDIA_PLAYLIST_UNLOCK (m3u8);
+
+  return is_live;
+}
+
+gchar *
+uri_join (const gchar * uri1, const gchar * uri2)
+{
+  gchar *uri_copy, *tmp, *ret = NULL;
+
+  if (gst_uri_is_valid (uri2))
+    return g_strdup (uri2);
+
+  uri_copy = g_strdup (uri1);
+  if (uri2[0] != '/') {
+    /* uri2 is a relative path */
+    /* look for query params */
+    tmp = g_utf8_strchr (uri_copy, -1, '?');
+    if (tmp) {
+      /* find last / char, ignoring query params */
+      tmp = g_utf8_strrchr (uri_copy, tmp - uri_copy, '/');
+    } else {
+      /* find last / char in URL */
+      tmp = g_utf8_strrchr (uri_copy, -1, '/');
+    }
+    if (!tmp)
+      goto out;
+
+    *tmp = '\0';
+    ret = g_strdup_printf ("%s/%s", uri_copy, uri2);
+  } else {
+    /* uri2 is an absolute path (starts with '/') */
+    char *scheme, *hostname;
+
+    scheme = uri_copy;
+    /* find the : in <scheme>:// */
+    tmp = g_utf8_strchr (uri_copy, -1, ':');
+    if (!tmp)
+      goto out;
+
+    *tmp = '\0';
+
+    /* skip :// */
+    hostname = tmp + 3;
+
+    tmp = g_utf8_strchr (hostname, -1, '/');
+    if (tmp)
+      *tmp = '\0';
+
+    ret = g_strdup_printf ("%s://%s%s", scheme, hostname, uri2);
+  }
+
+out:
+  g_free (uri_copy);
+  if (!ret)
+    GST_WARNING ("Can't build a valid uri from '%s' '%s'", uri1, uri2);
+
+  return ret;
+}
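+
+/* Examples of the three cases handled above:
+ *
+ *   uri_join ("http://example.com/a/b.m3u8", "seg1.ts")
+ *       -> "http://example.com/a/seg1.ts"        (relative path)
+ *   uri_join ("http://example.com/a/b.m3u8", "/seg1.ts")
+ *       -> "http://example.com/seg1.ts"          (absolute path)
+ *   uri_join ("http://example.com/a/b.m3u8", "https://cdn.example.com/x.ts")
+ *       -> "https://cdn.example.com/x.ts"        (already a full URI)
+ */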
+
+gboolean
+gst_hls_media_playlist_get_seek_range (GstHLSMediaPlaylist * m3u8,
+    gint64 * start, gint64 * stop)
+{
+  GstM3U8MediaSegment *first, *last;
+  guint min_distance = 1;
+
+  g_return_val_if_fail (m3u8 != NULL, FALSE);
+
+  if (m3u8->segments->len < 1)
+    return FALSE;
+
+  first = g_ptr_array_index (m3u8->segments, 0);
+  *start = first->stream_time;
+
+  if (GST_HLS_MEDIA_PLAYLIST_IS_LIVE (m3u8)) {
+    /* min_distance is used to make sure the seek range is never closer than
+       GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE fragments from the end of a live
+       playlist - see 6.3.3. "Playing the Playlist file" of the HLS draft */
+    min_distance =
+        MIN (GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE, m3u8->segments->len - 1);
+  }
+
+  last = g_ptr_array_index (m3u8->segments, m3u8->segments->len - min_distance);
+  *stop = last->stream_time + last->duration;
+
+  return TRUE;
+}
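+
+/* Example (assuming GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE is 3, see m3u8.h):
+ * for a live playlist with 6 segments of 4 seconds each starting at stream
+ * time 0, the reported seek range is [0, 16s]; the last two segments stay out
+ * of reach, per the "don't play too close to the live edge" recommendation
+ * referenced above. */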
+
+GstHLSRenditionStream *
+gst_hls_rendition_stream_ref (GstHLSRenditionStream * media)
+{
+  g_assert (media != NULL && media->ref_count > 0);
+  g_atomic_int_add (&media->ref_count, 1);
+  return media;
+}
+
+void
+gst_hls_rendition_stream_unref (GstHLSRenditionStream * media)
+{
+  g_assert (media != NULL && media->ref_count > 0);
+  if (g_atomic_int_dec_and_test (&media->ref_count)) {
+    g_free (media->group_id);
+    g_free (media->name);
+    g_free (media->uri);
+    g_free (media->lang);
+    g_free (media);
+  }
+}
+
+static GstHLSRenditionStreamType
+gst_m3u8_get_hls_media_type_from_string (const gchar * type_name)
+{
+  if (strcmp (type_name, "AUDIO") == 0)
+    return GST_HLS_RENDITION_STREAM_TYPE_AUDIO;
+  if (strcmp (type_name, "VIDEO") == 0)
+    return GST_HLS_RENDITION_STREAM_TYPE_VIDEO;
+  if (strcmp (type_name, "SUBTITLES") == 0)
+    return GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES;
+  if (strcmp (type_name, "CLOSED_CAPTIONS") == 0)
+    return GST_HLS_RENDITION_STREAM_TYPE_CLOSED_CAPTIONS;
+
+  return GST_HLS_RENDITION_STREAM_TYPE_INVALID;
+}
+
+#define GST_HLS_RENDITION_STREAM_TYPE_NAME(mtype) gst_hls_rendition_stream_type_get_name(mtype)
+const gchar *
+gst_hls_rendition_stream_type_get_name (GstHLSRenditionStreamType mtype)
+{
+  static const gchar *nicks[GST_HLS_N_MEDIA_TYPES] = { "audio", "video",
+    "subtitle", "closed-captions"
+  };
+
+  if (mtype < 0 || mtype >= GST_HLS_N_MEDIA_TYPES)
+    return "invalid";
+
+  return nicks[mtype];
+}
+
+/* returns unquoted copy of string */
+static gchar *
+gst_m3u8_unquote (const gchar * str)
+{
+  const gchar *start, *end;
+
+  start = strchr (str, '"');
+  if (start == NULL)
+    return g_strdup (str);
+  end = strchr (start + 1, '"');
+  if (end == NULL) {
+    GST_WARNING ("Broken quoted string [%s] - can't find end quote", str);
+    return g_strdup (start + 1);
+  }
+  return g_strndup (start + 1, (gsize) (end - (start + 1)));
+}
+
+static GstHLSRenditionStream *
+gst_m3u8_parse_media (gchar * desc, const gchar * base_uri)
+{
+  GstHLSRenditionStream *media;
+  gchar *a, *v;
+
+  media = g_new0 (GstHLSRenditionStream, 1);
+  media->ref_count = 1;
+  media->mtype = GST_HLS_RENDITION_STREAM_TYPE_INVALID;
+
+  GST_LOG ("parsing %s", desc);
+  while (desc != NULL && parse_attributes (&desc, &a, &v)) {
+    if (strcmp (a, "TYPE") == 0) {
+      media->mtype = gst_m3u8_get_hls_media_type_from_string (v);
+    } else if (strcmp (a, "GROUP-ID") == 0) {
+      g_free (media->group_id);
+      media->group_id = gst_m3u8_unquote (v);
+    } else if (strcmp (a, "NAME") == 0) {
+      g_free (media->name);
+      media->name = gst_m3u8_unquote (v);
+    } else if (strcmp (a, "URI") == 0) {
+      gchar *uri;
+
+      g_free (media->uri);
+      uri = gst_m3u8_unquote (v);
+      media->uri = uri_join (base_uri, uri);
+      g_free (uri);
+    } else if (strcmp (a, "LANGUAGE") == 0) {
+      g_free (media->lang);
+      media->lang = gst_m3u8_unquote (v);
+    } else if (strcmp (a, "DEFAULT") == 0) {
+      media->is_default = g_ascii_strcasecmp (v, "yes") == 0;
+    } else if (strcmp (a, "FORCED") == 0) {
+      media->forced = g_ascii_strcasecmp (v, "yes") == 0;
+    } else if (strcmp (a, "AUTOSELECT") == 0) {
+      media->autoselect = g_ascii_strcasecmp (v, "yes") == 0;
+    } else {
+      /* unhandled: ASSOC-LANGUAGE, INSTREAM-ID, CHARACTERISTICS */
+      GST_FIXME ("EXT-X-MEDIA: unhandled attribute: %s = %s", a, v);
+    }
+  }
+
+  if (media->mtype == GST_HLS_RENDITION_STREAM_TYPE_INVALID)
+    goto required_attributes_missing;
+
+  if (media->group_id == NULL || media->name == NULL)
+    goto required_attributes_missing;
+
+  if (media->mtype == GST_HLS_RENDITION_STREAM_TYPE_CLOSED_CAPTIONS)
+    goto uri_with_cc;
+
+  GST_DEBUG ("media: %s, group '%s', name '%s', uri '%s', %s %s %s, lang=%s",
+      GST_HLS_RENDITION_STREAM_TYPE_NAME (media->mtype), media->group_id,
+      media->name, media->uri, media->is_default ? "default" : "-",
+      media->autoselect ? "autoselect" : "-", media->forced ? "forced" : "-",
+      media->lang ? media->lang : "??");
+
+  return media;
+
+uri_with_cc:
+  {
+    GST_WARNING ("closed captions EXT-X-MEDIA should not have URI specified");
+    goto out_error;
+  }
+required_attributes_missing:
+  {
+    GST_WARNING ("EXT-X-MEDIA description is missing required attributes");
+    goto out_error;
+  }
+
+out_error:
+  {
+    gst_hls_rendition_stream_unref (media);
+    return NULL;
+  }
+}
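+
+/* Example: an attribute list such as (as it appears after the "#EXT-X-MEDIA:"
+ * tag)
+ *
+ *   TYPE=AUDIO,GROUP-ID="aud1",NAME="English",LANGUAGE="en",DEFAULT=YES,URI="audio/en.m3u8"
+ *
+ * parses into mtype AUDIO, group_id "aud1", name "English", lang "en",
+ * is_default TRUE, and a uri resolved against base_uri via uri_join(). */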
+
+GstStreamType
+gst_hls_get_stream_type_from_structure (GstStructure * st)
+{
+  const gchar *name = gst_structure_get_name (st);
+
+  if (g_str_has_prefix (name, "audio/"))
+    return GST_STREAM_TYPE_AUDIO;
+
+  if (g_str_has_prefix (name, "video/"))
+    return GST_STREAM_TYPE_VIDEO;
+
+  if (g_str_has_prefix (name, "application/x-subtitle"))
+    return GST_STREAM_TYPE_TEXT;
+
+  return 0;
+}
+
+GstStreamType
+gst_hls_get_stream_type_from_caps (GstCaps * caps)
+{
+  GstStreamType ret = 0;
+  guint i, nb;
+  nb = gst_caps_get_size (caps);
+  for (i = 0; i < nb; i++) {
+    GstStructure *cand = gst_caps_get_structure (caps, i);
+
+    ret |= gst_hls_get_stream_type_from_structure (cand);
+  }
+
+  return ret;
+}
+
+static GstHLSVariantStream *
+gst_hls_variant_stream_new (void)
+{
+  GstHLSVariantStream *stream;
+
+  stream = g_new0 (GstHLSVariantStream, 1);
+  stream->refcount = 1;
+  stream->codecs_stream_type = 0;
+  return stream;
+}
+
+GstHLSVariantStream *
+hls_variant_stream_ref (GstHLSVariantStream * stream)
+{
+  g_atomic_int_inc (&stream->refcount);
+  return stream;
+}
+
+void
+hls_variant_stream_unref (GstHLSVariantStream * stream)
+{
+  if (g_atomic_int_dec_and_test (&stream->refcount)) {
+    gint i;
+
+    g_free (stream->name);
+    g_free (stream->uri);
+    g_free (stream->codecs);
+    if (stream->caps)
+      gst_caps_unref (stream->caps);
+    for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+      g_free (stream->media_groups[i]);
+    }
+    g_list_free_full (stream->fallback, g_free);
+    g_free (stream);
+  }
+}
+
+static GstHLSVariantStream *
+gst_hls_variant_parse (gchar * data, const gchar * base_uri)
+{
+  GstHLSVariantStream *stream;
+  gchar *v, *a;
+
+  stream = gst_hls_variant_stream_new ();
+  stream->iframe = g_str_has_prefix (data, "#EXT-X-I-FRAME-STREAM-INF:");
+  data += stream->iframe ? 26 : 18;
+
+  while (data && parse_attributes (&data, &a, &v)) {
+    if (g_str_equal (a, "BANDWIDTH")) {
+      if (!stream->bandwidth) {
+        if (!int_from_string (v, NULL, &stream->bandwidth))
+          GST_WARNING ("Error while reading BANDWIDTH");
+      }
+    } else if (g_str_equal (a, "AVERAGE-BANDWIDTH")) {
+      GST_DEBUG
+          ("AVERAGE-BANDWIDTH attribute available. Using it as stream bandwidth");
+      if (!int_from_string (v, NULL, &stream->bandwidth))
+        GST_WARNING ("Error while reading AVERAGE-BANDWIDTH");
+    } else if (g_str_equal (a, "PROGRAM-ID")) {
+      if (!int_from_string (v, NULL, &stream->program_id))
+        GST_WARNING ("Error while reading PROGRAM-ID");
+    } else if (g_str_equal (a, "CODECS")) {
+      g_free (stream->codecs);
+      stream->codecs = g_strdup (v);
+      stream->caps = gst_codec_utils_caps_from_mime_codec (stream->codecs);
+      stream->codecs_stream_type =
+          gst_hls_get_stream_type_from_caps (stream->caps);
+    } else if (g_str_equal (a, "RESOLUTION")) {
+      if (!int_from_string (v, &v, &stream->width))
+        GST_WARNING ("Error while reading RESOLUTION width");
+      if (!v || *v != 'x') {
+        GST_WARNING ("Missing height");
+      } else {
+        v = g_utf8_next_char (v);
+        if (!int_from_string (v, NULL, &stream->height))
+          GST_WARNING ("Error while reading RESOLUTION height");
+      }
+    } else if (stream->iframe && g_str_equal (a, "URI")) {
+      stream->uri = uri_join (base_uri, v);
+    } else if (g_str_equal (a, "AUDIO")) {
+      g_free (stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_AUDIO]);
+      stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_AUDIO] =
+          gst_m3u8_unquote (v);
+    } else if (g_str_equal (a, "SUBTITLES")) {
+      g_free (stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES]);
+      stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES] =
+          gst_m3u8_unquote (v);
+    } else if (g_str_equal (a, "VIDEO")) {
+      g_free (stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_VIDEO]);
+      stream->media_groups[GST_HLS_RENDITION_STREAM_TYPE_VIDEO] =
+          gst_m3u8_unquote (v);
+    } else if (g_str_equal (a, "CLOSED-CAPTIONS")) {
+      /* closed captions will be embedded inside the video stream, ignore */
+    }
+  }
+
+  return stream;
+}
+
+static gchar *
+generate_variant_stream_name (gchar * uri, gint bandwidth)
+{
+  gchar *checksum = g_compute_checksum_for_string (G_CHECKSUM_SHA1, uri, -1);
+  gchar *res = g_strdup_printf ("variant-%dbps-%s", bandwidth, checksum);
+
+  g_free (checksum);
+  return res;
+}
+
+static GstHLSVariantStream *
+find_variant_stream_by_name (GList * list, const gchar * name)
+{
+  for (; list != NULL; list = list->next) {
+    GstHLSVariantStream *variant_stream = list->data;
+
+    if (variant_stream->name != NULL && !strcmp (variant_stream->name, name))
+      return variant_stream;
+  }
+  return NULL;
+}
+
+static GstHLSVariantStream *
+find_variant_stream_by_uri (GList * list, const gchar * uri)
+{
+  for (; list != NULL; list = list->next) {
+    GstHLSVariantStream *variant_stream = list->data;
+
+    if (variant_stream->uri != NULL && !strcmp (variant_stream->uri, uri))
+      return variant_stream;
+  }
+  return NULL;
+}
+
+static GstHLSVariantStream *
+find_variant_stream_for_fallback (GList * list, GstHLSVariantStream * fallback)
+{
+  for (; list != NULL; list = list->next) {
+    GstHLSVariantStream *variant_stream = list->data;
+
+    if (variant_stream->bandwidth == fallback->bandwidth &&
+        variant_stream->width == fallback->width &&
+        variant_stream->height == fallback->height &&
+        variant_stream->iframe == fallback->iframe &&
+        !g_strcmp0 (variant_stream->codecs, fallback->codecs))
+      return variant_stream;
+  }
+  return NULL;
+}
+
+static GstHLSMasterPlaylist *
+gst_hls_master_playlist_new (void)
+{
+  GstHLSMasterPlaylist *playlist;
+
+  playlist = g_new0 (GstHLSMasterPlaylist, 1);
+  playlist->refcount = 1;
+  playlist->is_simple = FALSE;
+
+  return playlist;
+}
+
+void
+hls_master_playlist_unref (GstHLSMasterPlaylist * playlist)
+{
+  if (g_atomic_int_dec_and_test (&playlist->refcount)) {
+    g_list_free_full (playlist->variants,
+        (GDestroyNotify) gst_hls_variant_stream_unref);
+    g_list_free_full (playlist->iframe_variants,
+        (GDestroyNotify) gst_hls_variant_stream_unref);
+    if (playlist->default_variant)
+      gst_hls_variant_stream_unref (playlist->default_variant);
+    g_free (playlist->last_data);
+    g_free (playlist);
+  }
+}
+
+static gint
+hls_media_compare_func (GstHLSRenditionStream * ma, GstHLSRenditionStream * mb)
+{
+  if (ma->mtype != mb->mtype)
+    return ma->mtype - mb->mtype;
+
+  return strcmp (ma->name, mb->name) || strcmp (ma->group_id, mb->group_id);
+}
+
+static GstCaps *
+stream_get_media_caps (GstHLSVariantStream * stream,
+    GstHLSRenditionStreamType mtype)
+{
+  GstStructure *st = NULL;
+  GstCaps *ret;
+  guint i, nb;
+
+  if (stream->caps == NULL)
+    return NULL;
+
+  nb = gst_caps_get_size (stream->caps);
+  for (i = 0; i < nb; i++) {
+    GstStructure *cand = gst_caps_get_structure (stream->caps, i);
+    const gchar *name = gst_structure_get_name (cand);
+    gboolean matched;
+
+    switch (mtype) {
+      case GST_HLS_RENDITION_STREAM_TYPE_AUDIO:
+        matched = g_str_has_prefix (name, "audio/");
+        break;
+      case GST_HLS_RENDITION_STREAM_TYPE_VIDEO:
+        matched = g_str_has_prefix (name, "video/");
+        break;
+      case GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES:
+        matched = g_str_has_prefix (name, "application/x-subtitle");
+        break;
+      default:
+        matched = FALSE;
+        break;
+    }
+
+    if (!matched)
+      continue;
+
+    if (st) {
+      GST_WARNING ("More than one caps for the same type, can't match");
+      return NULL;
+    }
+
+    st = cand;
+  }
+
+  if (!st)
+    return NULL;
+
+  ret = gst_caps_new_empty ();
+  gst_caps_append_structure (ret, gst_structure_copy (st));
+  return ret;
+}
+
+static gint
+gst_hls_variant_stream_compare_by_bitrate (gconstpointer a, gconstpointer b)
+{
+  const GstHLSVariantStream *vs_a = (const GstHLSVariantStream *) a;
+  const GstHLSVariantStream *vs_b = (const GstHLSVariantStream *) b;
+
+  if (vs_a->bandwidth == vs_b->bandwidth)
+    return g_strcmp0 (vs_a->name, vs_b->name);
+
+  return vs_a->bandwidth - vs_b->bandwidth;
+}
+
+/**
+ * gst_hls_master_playlist_new_from_data:
+ * @data: (transfer full): The manifest to parse
+ * @base_uri: The URI of the manifest
+ *
+ * Parse the provided manifest and construct the master playlist.
+ *
+ * Returns: The parsed GstHLSMasterPlaylist, or NULL if there was an error.
+ */
+GstHLSMasterPlaylist *
+hls_master_playlist_new_from_data (gchar * data, const gchar * base_uri)
+{
+  GstHLSMasterPlaylist *playlist;
+  GstHLSVariantStream *pending_stream, *existing_stream;
+  gchar *end, *free_data = data;
+  gint val;
+  GList *tmp;
+  GstStreamType most_seen_types = 0;
+
+  if (!g_str_has_prefix (data, "#EXTM3U")) {
+    GST_WARNING ("Data doesn't start with #EXTM3U");
+    g_free (free_data);
+    return NULL;
+  }
+
+  playlist = gst_hls_master_playlist_new ();
+
+  /* store data before we modify it for parsing */
+  playlist->last_data = g_strdup (data);
+
+  GST_TRACE ("data:\n%s", data);
+
+  /* Detect early whether this manifest describes a simple media playlist or
+   * not */
+  if (strstr (data, "\n#EXTINF:") != NULL) {
+    GST_INFO ("This is a simple media playlist, not a master playlist");
+
+    pending_stream = gst_hls_variant_stream_new ();
+    pending_stream->name = g_strdup ("media-playlist");
+    pending_stream->uri = g_strdup (base_uri);
+    playlist->variants = g_list_append (playlist->variants, pending_stream);
+    playlist->default_variant = gst_hls_variant_stream_ref (pending_stream);
+    playlist->is_simple = TRUE;
+
+    return playlist;
+  }
+
+  /* Beginning of the actual master playlist parsing */
+  pending_stream = NULL;
+  data += 7;
+  while (TRUE) {
+    gchar *r;
+
+    end = g_utf8_strchr (data, -1, '\n');
+    if (end)
+      *end = '\0';
+
+    r = g_utf8_strchr (data, -1, '\r');
+    if (r)
+      *r = '\0';
+
+    if (data[0] != '#' && data[0] != '\0') {
+      gchar *name, *uri;
+
+      if (pending_stream == NULL) {
+        GST_LOG ("%s: got non-empty line without #EXT-X-STREAM-INF, dropping",
+            data);
+        goto next_line;
+      }
+
+      uri = uri_join (base_uri, data);
+      if (uri == NULL)
+        goto next_line;
+
+      pending_stream->name = name =
+          generate_variant_stream_name (uri, pending_stream->bandwidth);
+      pending_stream->uri = uri;
+
+      if (find_variant_stream_by_name (playlist->variants, name)
+          || find_variant_stream_by_uri (playlist->variants, uri)) {
+        GST_DEBUG ("Already have a list with this name or URI: %s", name);
+        gst_hls_variant_stream_unref (pending_stream);
+      } else if ((existing_stream =
+              find_variant_stream_for_fallback (playlist->variants,
+                  pending_stream))) {
+        GST_DEBUG ("Adding to %s fallback URI %s", existing_stream->name,
+            pending_stream->uri);
+        existing_stream->fallback =
+            g_list_append (existing_stream->fallback,
+            g_strdup (pending_stream->uri));
+        gst_hls_variant_stream_unref (pending_stream);
+      } else {
+        GST_INFO ("stream %s @ %u: %s", name, pending_stream->bandwidth, uri);
+        playlist->variants = g_list_append (playlist->variants, pending_stream);
+        /* use first stream in the playlist as default */
+        if (playlist->default_variant == NULL) {
+          playlist->default_variant =
+              gst_hls_variant_stream_ref (pending_stream);
+        }
+      }
+      pending_stream = NULL;
+    } else if (g_str_has_prefix (data, "#EXT-X-VERSION:")) {
+      if (int_from_string (data + 15, &data, &val))
+        playlist->version = val;
+    } else if (g_str_has_prefix (data, "#EXT-X-STREAM-INF:") ||
+        g_str_has_prefix (data, "#EXT-X-I-FRAME-STREAM-INF:")) {
+      GstHLSVariantStream *stream = gst_hls_variant_parse (data, base_uri);
+
+      if (stream->iframe) {
+        if (find_variant_stream_by_uri (playlist->iframe_variants, stream->uri)) {
+          GST_DEBUG ("Already have a list with this URI");
+          gst_hls_variant_stream_unref (stream);
+        } else {
+          playlist->iframe_variants =
+              g_list_append (playlist->iframe_variants, stream);
+        }
+      } else {
+        if (pending_stream != NULL) {
+          GST_WARNING ("variant stream without uri, dropping");
+          gst_hls_variant_stream_unref (pending_stream);
+        }
+        pending_stream = stream;
+      }
+    } else if (g_str_has_prefix (data, "#EXT-X-MEDIA:")) {
+      GstHLSRenditionStream *media;
+
+      media = gst_m3u8_parse_media (data + strlen ("#EXT-X-MEDIA:"), base_uri);
+
+      if (media == NULL)
+        goto next_line;
+
+      if (g_list_find_custom (playlist->renditions, media,
+              (GCompareFunc) hls_media_compare_func)) {
+        GST_DEBUG ("Dropping duplicate alternate rendition group : %s", data);
+        gst_hls_rendition_stream_unref (media);
+        goto next_line;
+      }
+      playlist->renditions = g_list_append (playlist->renditions, media);
+      GST_INFO ("Stored media %s / group %s", media->name, media->group_id);
+    } else if (*data != '\0') {
+      GST_LOG ("Ignored line: %s", data);
+    }
+
+  next_line:
+    if (!end)
+      break;
+    data = g_utf8_next_char (end);      /* skip \n */
+  }
+
+  if (pending_stream != NULL) {
+    GST_WARNING ("#EXT-X-STREAM-INF without uri, dropping");
+    gst_hls_variant_stream_unref (pending_stream);
+  }
+
+  g_free (free_data);
+
+  if (playlist->variants == NULL) {
+    GST_WARNING ("Master playlist without any media playlists!");
+    gst_hls_master_playlist_unref (playlist);
+    return NULL;
+  }
+
+  /* reorder variants by bitrate */
+  playlist->variants =
+      g_list_sort (playlist->variants,
+      (GCompareFunc) gst_hls_variant_stream_compare_by_bitrate);
+
+  playlist->iframe_variants =
+      g_list_sort (playlist->iframe_variants,
+      (GCompareFunc) gst_hls_variant_stream_compare_by_bitrate);
+
+#ifndef GST_DISABLE_GST_DEBUG
+  /* Sanity check : If there are no codecs, a stream shouldn't point to
+   * alternate rendition groups.
+   *
+   * Write a warning to help with further debugging if this causes issues
+   * later */
+  for (tmp = playlist->variants; tmp; tmp = tmp->next) {
+    GstHLSVariantStream *stream = tmp->data;
+
+    if (stream->codecs == NULL) {
+      if (stream->media_groups[0] || stream->media_groups[1]
+          || stream->media_groups[2] || stream->media_groups[3]) {
+        GST_WARNING
+            ("Variant specifies alternate rendition groups but has no codecs specified");
+      }
+    }
+  }
+#endif
+
+  /* Compute the union of stream types declared by the variant codecs */
+  for (tmp = playlist->variants; tmp; tmp = tmp->next) {
+    GstHLSVariantStream *stream = tmp->data;
+
+    most_seen_types |= stream->codecs_stream_type;
+  }
+
+  /* Flag the playlist to indicate whether codec information was found on the variants */
+  playlist->have_codecs = most_seen_types != 0;
+
+  GST_DEBUG ("have_codecs:%d most_seen_types:%d", playlist->have_codecs,
+      most_seen_types);
+
+  /* Filter out audio-only variants from audio+video stream */
+  if (playlist->have_codecs && most_seen_types != GST_STREAM_TYPE_AUDIO) {
+    tmp = playlist->variants;
+    while (tmp) {
+      GstHLSVariantStream *stream = tmp->data;
+
+      if (stream->codecs_stream_type != most_seen_types &&
+          stream->codecs_stream_type == GST_STREAM_TYPE_AUDIO) {
+        GST_DEBUG ("Remove variant with partial stream types %s", stream->name);
+        tmp = playlist->variants = g_list_remove (playlist->variants, stream);
+        gst_hls_variant_stream_unref (stream);
+      } else
+        tmp = tmp->next;
+    }
+  }
+
+  if (playlist->renditions) {
+    guint i;
+    /* Assign information from variants to alternate rendition groups. Note that
+     * at this point we know that there are caps present on the variants */
+    for (tmp = playlist->variants; tmp; tmp = tmp->next) {
+      GstHLSVariantStream *stream = tmp->data;
+
+      GST_DEBUG ("Post-processing Variant Stream '%s'", stream->name);
+
+      for (i = 0; i < GST_HLS_N_MEDIA_TYPES; ++i) {
+        gchar *alt_rend_group = stream->media_groups[i];
+
+        if (alt_rend_group) {
+          gboolean alt_in_variant = FALSE;
+          GstCaps *media_caps = stream_get_media_caps (stream, i);
+          GList *altlist;
+          if (!media_caps)
+            continue;
+          for (altlist = playlist->renditions; altlist; altlist = altlist->next) {
+            GstHLSRenditionStream *media = altlist->data;
+            if (media->mtype != i
+                || g_strcmp0 (media->group_id, alt_rend_group))
+              continue;
+            GST_DEBUG ("  %s caps:%" GST_PTR_FORMAT " media %s, uri: %s",
+                GST_HLS_RENDITION_STREAM_TYPE_NAME (i), media_caps, media->name,
+                media->uri);
+            if (media->uri == NULL) {
+              GST_DEBUG ("  Media is present in main variant stream");
+              alt_in_variant = TRUE;
+            } else {
+              /* Assign caps to media */
+              if (media->caps && !gst_caps_is_equal (media->caps, media_caps)) {
+                GST_ERROR ("  Media already has different caps %"
+                    GST_PTR_FORMAT, media->caps);
+              } else {
+                GST_DEBUG ("  Assigning caps %" GST_PTR_FORMAT, media_caps);
+                media->caps = gst_caps_ref (media_caps);
+              }
+            }
+          }
+          if (!alt_in_variant) {
+            GstCaps *new_caps = gst_caps_subtract (stream->caps, media_caps);
+            gst_caps_replace (&stream->caps, new_caps);
+          }
+          gst_caps_unref (media_caps);
+        }
+      }
+      GST_DEBUG ("Stream Ends up with caps %" GST_PTR_FORMAT, stream->caps);
+    }
+  }
+
+  GST_DEBUG
+      ("parsed master playlist with %d streams, %d I-frame streams and %d alternative rendition groups",
+      g_list_length (playlist->variants),
+      g_list_length (playlist->iframe_variants),
+      g_list_length (playlist->renditions));
+
+  return playlist;
+}
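+
+/* Typical usage sketch (illustrative; `playlist_text` is a hypothetical
+ * variable holding the downloaded manifest, which is consumed by the call):
+ *
+ *   GstHLSMasterPlaylist *master =
+ *       hls_master_playlist_new_from_data (g_strdup (playlist_text),
+ *           "http://example.com/master.m3u8");
+ *   if (master != NULL) {
+ *     // ... pick a variant, e.g. with
+ *     // hls_master_playlist_get_variant_for_bitrate() ...
+ *     gst_hls_master_playlist_unref (master);
+ *   }
+ */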
+
+GstHLSVariantStream *
+hls_master_playlist_get_variant_for_bitrate (GstHLSMasterPlaylist *
+    playlist, GstHLSVariantStream * current_variant, guint bitrate,
+    guint min_bitrate)
+{
+  GstHLSVariantStream *variant = current_variant;
+  GstHLSVariantStream *variant_by_min = current_variant;
+  GList *l;
+
+  /* variant lists are sorted low to high, so iterate from highest to lowest */
+  if (current_variant == NULL || !current_variant->iframe)
+    l = g_list_last (playlist->variants);
+  else
+    l = g_list_last (playlist->iframe_variants);
+
+  while (l != NULL) {
+    variant = l->data;
+    if (variant->bandwidth >= min_bitrate)
+      variant_by_min = variant;
+    if (variant->bandwidth <= bitrate)
+      break;
+    l = l->prev;
+  }
+
+  /* If variant bitrate is above the min_bitrate (or min_bitrate == 0)
+   * return it now */
+  if (variant && variant->bandwidth >= min_bitrate)
+    return variant;
+
+  /* Otherwise, return the last (lowest bitrate) variant we saw that
+   * was higher than the min_bitrate */
+  return variant_by_min;
+}
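+
+/* Worked example (illustrative): with variants sorted low-to-high at
+ * 500 kbps, 1 Mbps and 2 Mbps, a request with bitrate=1.5 Mbps and
+ * min_bitrate=0 walks down from the highest variant and stops at 1 Mbps,
+ * the first variant whose bandwidth is <= bitrate. With min_bitrate=1.8 Mbps
+ * instead, no variant satisfies both constraints, so the lowest variant seen
+ * at or above min_bitrate (2 Mbps) is returned. */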
+
+static gboolean
+remove_uncommon (GQuark field_id, GValue * value, GstStructure * st2)
+{
+  const GValue *other;
+  GValue dest = G_VALUE_INIT;
+
+  other = gst_structure_id_get_value (st2, field_id);
+
+  if (other == NULL || (G_VALUE_TYPE (value) != G_VALUE_TYPE (other)))
+    return FALSE;
+
+  if (!gst_value_intersect (&dest, value, other))
+    return FALSE;
+
+  g_value_reset (value);
+  g_value_copy (&dest, value);
+
+  return TRUE;
+}
+
+/* Merge all common structures from caps1 and caps2
+ *
+ * Returns NULL if a structure from caps1 has no counterpart in caps2 */
+static GstCaps *
+gst_caps_merge_common (GstCaps * caps1, GstCaps * caps2)
+{
+  guint it1, it2;
+  GstCaps *res = gst_caps_new_empty ();
+
+  for (it1 = 0; it1 < gst_caps_get_size (caps1); it1++) {
+    GstStructure *st1 = gst_caps_get_structure (caps1, it1);
+    GstStructure *merged = NULL;
+    const gchar *name1 = gst_structure_get_name (st1);
+
+    for (it2 = 0; it2 < gst_caps_get_size (caps2); it2++) {
+      GstStructure *st2 = gst_caps_get_structure (caps2, it2);
+      if (gst_structure_has_name (st2, name1)) {
+        if (merged == NULL)
+          merged = gst_structure_copy (st1);
+        gst_structure_filter_and_map_in_place (merged,
+            (GstStructureFilterMapFunc) remove_uncommon, st2);
+      }
+    }
+
+    if (merged == NULL)
+      goto fail;
+    gst_caps_append_structure (res, merged);
+  }
+
+  return res;
+
+fail:
+  {
+    GST_ERROR ("Failed to create common caps");
+    gst_caps_unref (res);
+    return NULL;
+  }
+}
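+
+/* Example (illustrative) of the merge above:
+ *
+ *   caps1 : "video/x-h264, width=1280, framerate=30/1"
+ *   caps2 : "video/x-h264, width=1920, framerate=30/1"
+ *   result: "video/x-h264, framerate=30/1"
+ *
+ * Fields whose values do not intersect (width) are dropped; a structure name
+ * from caps1 with no counterpart in caps2 makes the whole merge fail (NULL).
+ */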
+
+GstCaps *
+hls_master_playlist_get_common_caps (GstHLSMasterPlaylist * playlist)
+{
+  GList *tmp;
+  GstCaps *res = NULL;
+
+  for (tmp = playlist->variants; tmp; tmp = tmp->next) {
+    GstHLSVariantStream *stream = tmp->data;
+
+    GST_DEBUG ("stream caps %" GST_PTR_FORMAT, stream->caps);
+    if (!stream->caps) {
+      /* If one of the streams doesn't have *any* caps, we can't reliably return
+       * any common caps */
+      if (res)
+        gst_caps_unref (res);
+      res = NULL;
+      goto beach;
+    }
+    if (!res) {
+      res = gst_caps_copy (stream->caps);
+    } else {
+      res = gst_caps_merge_common (res, stream->caps);
+      if (!res)
+        goto beach;
+    }
+  }
+
+  res = gst_caps_simplify (res);
+
+beach:
+  GST_DEBUG ("Returning common caps %" GST_PTR_FORMAT, res);
+
+  return res;
+}
+
+GstStreamType
+gst_stream_type_from_hls_type (GstHLSRenditionStreamType mtype)
+{
+  switch (mtype) {
+    case GST_HLS_RENDITION_STREAM_TYPE_AUDIO:
+      return GST_STREAM_TYPE_AUDIO;
+    case GST_HLS_RENDITION_STREAM_TYPE_VIDEO:
+      return GST_STREAM_TYPE_VIDEO;
+    case GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES:
+      return GST_STREAM_TYPE_TEXT;
+    default:
+      return GST_STREAM_TYPE_UNKNOWN;
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/m3u8.h
new file mode 100644 (file)
index 0000000..d1b535d
--- /dev/null
@@ -0,0 +1,416 @@
+/* GStreamer
+ * Copyright (C) 2010 Marc-Andre Lureau <marcandre.lureau@gmail.com>
+ * Copyright (C) 2010 Andoni Morales Alastruey <ylatuya@gmail.com>
+ * Copyright (C) 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * Copyright (C) 2021-2022 Centricular Ltd
+ *   Author: Edward Hervey <edward@centricular.com>
+ *   Author: Jan Schmidt <jan@centricular.com>
+ *
+ * m3u8.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __M3U8_H__
+#define __M3U8_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstHLSMediaPlaylist GstHLSMediaPlaylist;
+typedef struct _GstHLSTimeMap GstHLSTimeMap;
+typedef struct _GstM3U8MediaSegment GstM3U8MediaSegment;
+typedef struct _GstM3U8InitFile GstM3U8InitFile;
+typedef struct _GstHLSRenditionStream GstHLSRenditionStream;
+typedef struct _GstM3U8Client GstM3U8Client;
+typedef struct _GstHLSVariantStream GstHLSVariantStream;
+typedef struct _GstHLSMasterPlaylist GstHLSMasterPlaylist;
+
+#define GST_HLS_MEDIA_PLAYLIST(m) ((GstHLSMediaPlaylist*)m)
+#define GST_M3U8_MEDIA_SEGMENT(f) ((GstM3U8MediaSegment*)f)
+
+#define GST_HLS_MEDIA_PLAYLIST_LOCK(m) g_mutex_lock (&m->lock);
+#define GST_HLS_MEDIA_PLAYLIST_UNLOCK(m) g_mutex_unlock (&m->lock);
+
+#define GST_HLS_MEDIA_PLAYLIST_IS_LIVE(m) ((m)->endlist == FALSE)
+
+/* hlsdemux must not get closer to the end of a live stream than
+   GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE fragments. Section 6.3.3
+   "Playing the Playlist file" of the HLS draft states that this
+   value is three fragments */
+#define GST_M3U8_LIVE_MIN_FRAGMENT_DISTANCE 3
+
+typedef enum {
+  GST_HLS_PLAYLIST_TYPE_UNDEFINED,
+  GST_HLS_PLAYLIST_TYPE_EVENT,
+  GST_HLS_PLAYLIST_TYPE_VOD,
+} GstHLSPlaylistType;
+
+/**
+ * GstHLSMediaPlaylist:
+ *
+ * Official term in RFC : "Media Playlist". A List of Media Segments.
+ *
+ * It can be used by either a variant stream (GstHLSVariantStream) or an
+ * alternate rendition (GstHLSMedia).
+ *
+ * Note: Was called `GstM3u8` in legacy elements
+ */
+
+struct _GstHLSMediaPlaylist
+{
+  gchar *uri;                   /* actually downloaded URI */
+  gchar *base_uri;              /* URI to use as base for resolving relative URIs.
+                                 * This will be different from uri in case of redirects */
+  /* Base Tag */
+  gint version;                 /* EXT-X-VERSION (default 1) */
+
+  /* Media Playlist Tags */
+  GstClockTime targetduration;  /* EXT-X-TARGETDURATION, default GST_CLOCK_TIME_NONE */
+  gint64 media_sequence;       /* EXT-X-MEDIA-SEQUENCE, MSN of the first Media
+                                  Segment in the playlist. */
+  gint64 discont_sequence;     /* EXT-X-DISCONTINUITY-SEQUENCE. Default : 0 */
+  gboolean has_ext_x_dsn;      /* EXT-X-DISCONTINUITY-SEQUENCE present and specified */
+  gboolean endlist;             /* EXT-X-ENDLIST present */
+  GstHLSPlaylistType type;     /* EXT-X-PLAYLIST-TYPE. Default:
+                                  GST_HLS_PLAYLIST_TYPE_UNDEFINED */
+  gboolean i_frame;            /* EXT-X-I-FRAMES-ONLY present. */
+
+  gboolean allowcache;         /* deprecated EXT-X-ALLOW-CACHE */
+
+  /* Overview of contained media segments */
+  gboolean ext_x_key_present;  /* a valid EXT-X-KEY is present on at least one
+                                  media segment */
+  gboolean ext_x_pdt_present;   /* a valid EXT-X-PROGRAM-DATE-TIME is present on
+                                  at least one media segment */
+
+  GPtrArray *segments;         /* Array of GstM3U8MediaSegment */
+
+  /* Generated information */
+  GstClockTime duration;       /* The estimated total duration of all segments
+                                  contained in this playlist */
+
+  gboolean reloaded;           /* If TRUE, this indicates that this playlist
+                                * was reloaded but had identical content */
+
+  /*< private > */
+  GMutex lock;
+
+  /* Copy of the incoming data that created this media playlist.
+   * See gst_hls_media_playlist_has_same_data()  */
+  gchar   *last_data;
+
+  gint ref_count;               /* ATOMIC */
+};
+
+/* gst_hls_media_playlist_new: Internal function. Do not use from demuxer code;
+ *               it is only exposed for unit testing purposes */
+GstHLSMediaPlaylist * gst_hls_media_playlist_new (const gchar * uri,
+                                                 const gchar * base_uri);
+
+GstHLSMediaPlaylist * gst_hls_media_playlist_ref (GstHLSMediaPlaylist * m3u8);
+
+void                  gst_hls_media_playlist_unref (GstHLSMediaPlaylist * m3u8);
+
+/**
+ * GstM3U8MediaSegment:
+ *
+ * Official term in RFC : "Media Segment"
+ *
+ * Note: Was called GstM3U8MediaFile in legacy elements
+ */
+struct _GstM3U8MediaSegment
+{
+  gchar *title;
+  GstClockTimeDiff stream_time;        /* Computed stream time */
+  GstClockTime duration;
+  gchar *uri;
+  gint64 sequence;             /* the sequence number of this segment */
+  gint64 discont_sequence;     /* The Discontinuity Sequence Number of this segment */
+  gboolean discont;             /* this file marks a discontinuity */
+  gchar *key;
+  guint8 iv[16];
+  gint64 offset, size;
+  gint ref_count;               /* ATOMIC */
+  GstM3U8InitFile *init_file;   /* Media Initialization (hold ref) */
+  GDateTime *datetime;         /* EXT-X-PROGRAM-DATE-TIME */
+};
+
+struct _GstM3U8InitFile
+{
+  gchar *uri;
+  gint64 offset, size;
+  guint ref_count;      /* ATOMIC */
+};
+
+GstM3U8MediaSegment *
+gst_m3u8_media_segment_ref   (GstM3U8MediaSegment * mfile);
+
+void
+gst_m3u8_media_segment_unref (GstM3U8MediaSegment * mfile);
+
+
+gboolean
+gst_hls_media_playlist_has_same_data (GstHLSMediaPlaylist * m3u8,
+                                     gchar   * playlist_data);
+
+GstHLSMediaPlaylist *
+gst_hls_media_playlist_parse (gchar        * data,
+                             const gchar  * uri,
+                             const gchar  * base_uri);
+
+void
+gst_hls_media_playlist_recalculate_stream_time (GstHLSMediaPlaylist *playlist,
+                                               GstM3U8MediaSegment *anchor);
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_sync_to_segment      (GstHLSMediaPlaylist * m3u8,
+                                            GstM3U8MediaSegment * segment);
+
+gboolean
+gst_hls_media_playlist_has_next_fragment    (GstHLSMediaPlaylist * m3u8,
+                                            GstM3U8MediaSegment * current,
+                                            gboolean  forward);
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_advance_fragment     (GstHLSMediaPlaylist * m3u8,
+                                            GstM3U8MediaSegment * current,
+                                            gboolean  forward);
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_get_starting_segment (GstHLSMediaPlaylist *self);
+
+GstClockTime
+gst_hls_media_playlist_get_duration         (GstHLSMediaPlaylist * m3u8);
+
+gchar *
+gst_hls_media_playlist_get_uri              (GstHLSMediaPlaylist * m3u8);
+
+gboolean
+gst_hls_media_playlist_is_live              (GstHLSMediaPlaylist * m3u8);
+
+gboolean
+gst_hls_media_playlist_get_seek_range       (GstHLSMediaPlaylist * m3u8,
+                                            gint64  * start,
+                                            gint64  * stop);
+
+GstM3U8MediaSegment *
+gst_hls_media_playlist_seek                 (GstHLSMediaPlaylist *playlist,
+                                            gboolean forward,
+                                            GstSeekFlags flags,
+                                            GstClockTimeDiff ts);
+void
+gst_hls_media_playlist_dump                 (GstHLSMediaPlaylist* self);
+
+typedef enum
+{
+  GST_HLS_RENDITION_STREAM_TYPE_INVALID = -1,
+  GST_HLS_RENDITION_STREAM_TYPE_AUDIO,
+  GST_HLS_RENDITION_STREAM_TYPE_VIDEO,
+  GST_HLS_RENDITION_STREAM_TYPE_SUBTITLES,
+  GST_HLS_RENDITION_STREAM_TYPE_CLOSED_CAPTIONS,
+  GST_HLS_N_MEDIA_TYPES
+} GstHLSRenditionStreamType;
+
+/**
+ * GstHLSRenditionStream:
+ *
+ * Official term in RFC : "Renditions are alternate versions of the content,
+ *   such as audio produced in different languages or video recorded from
+ *   different camera angles."
+ *
+ * Note: Was named GstHLSMedia in legacy elements
+ */
+
+struct _GstHLSRenditionStream {
+  GstHLSRenditionStreamType mtype;
+  gchar *group_id;
+  gchar *name;
+  gchar *lang;
+  gchar *uri;
+  GstCaps *caps;
+  gboolean is_default;
+  gboolean autoselect;
+  gboolean forced;
+
+  gint ref_count;               /* ATOMIC */
+};
+
+GstHLSRenditionStream *
+gst_hls_rendition_stream_ref   (GstHLSRenditionStream * media);
+
+void
+gst_hls_rendition_stream_unref (GstHLSRenditionStream * media);
+
+const gchar *
+gst_hls_rendition_stream_type_get_name (GstHLSRenditionStreamType mtype);
+
+
+/**
+ * GstHLSVariantStream:
+ *
+ * Official term in RFC :
+ * """
+ * A Master Playlist provides a set of Variant Streams, each of which describes
+ *   a different version of the same content.
+ *
+ * A Variant Stream includes a Media Playlist that specifies media encoded at a
+ *  particular bit rate, in a particular format, and at a particular resolution
+ *  for media containing video.
+ * """
+ */
+struct _GstHLSVariantStream {
+  gchar *name;         /* This will be the "name" of the playlist, the original
+                        * relative/absolute uri in a variant playlist */
+  gchar *uri;
+  gchar *codecs;
+  GstCaps *caps;
+  GstStreamType codecs_stream_type;    /* As defined by codecs */
+  gint bandwidth;                      /* bits per second */
+  gint program_id;
+  gint width;
+  gint height;
+  gboolean iframe;
+
+  gint refcount;       /* ATOMIC */
+
+  /* alternative renditions (names) */
+  gchar *media_groups[GST_HLS_N_MEDIA_TYPES];
+
+  /* List of gchar* fallback uri */
+  GList *fallback;
+};
+
+/* Note: the #defines are to avoid symbol clashes with the legacy hlsdemux */
+
+#define gst_hls_variant_stream_ref hls_variant_stream_ref
+GstHLSVariantStream * hls_variant_stream_ref (GstHLSVariantStream * stream);
+
+#define gst_hls_variant_stream_unref hls_variant_stream_unref
+void                  hls_variant_stream_unref (GstHLSVariantStream * stream);
+
+/**
+ * GstHLSMasterPlaylist:
+ *
+ * Official term in RFC : "A Playlist is either a Media Playlist or a Master
+ * Playlist."
+ *
+ * This is the top-level object, constructed by a manifest provided by external
+ * means.
+ */
+struct _GstHLSMasterPlaylist
+{
+  /* Available variant streams, sorted by bitrate (low -> high) */
+  GList    *variants;          /* GstHLSVariantStream */
+  GList    *iframe_variants;   /* GstHLSVariantStream */
+
+  /* Default variant, first in the list (originally, before sorting) */
+  GstHLSVariantStream *default_variant;
+
+  /* Full list of Available Alternative Rendition (GstHLSRenditionStream) */
+  GList    *renditions;
+
+  /* EXT-X-VERSION. 0 if unspecified */
+  gint      version;
+
+  /* TRUE if this playlist is a simple media playlist (and not a master
+   * playlist). Implies that there is only a single variant and no alternate
+   * rendition groups */
+  gboolean  is_simple;
+
+  /* TRUE if all variants have codecs specified */
+  gboolean have_codecs;
+
+  /*< private > */
+  gchar   *last_data;          /* Copy of the incoming data that created this master playlist */
+
+  gint      refcount;                    /* ATOMIC */
+};
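+
+/* For reference, a simplified (illustrative) master playlist that would be
+ * parsed into the structures above:
+ *
+ *   #EXTM3U
+ *   #EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="aud",NAME="English",DEFAULT=YES,URI="audio/en.m3u8"
+ *   #EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=640x360,CODECS="avc1.4d401e,mp4a.40.2",AUDIO="aud"
+ *   low/video.m3u8
+ *   #EXT-X-STREAM-INF:BANDWIDTH=2560000,RESOLUTION=1280x720,CODECS="avc1.4d401f,mp4a.40.2",AUDIO="aud"
+ *   mid/video.m3u8
+ *
+ * This yields two GstHLSVariantStream entries (sorted by bandwidth) and one
+ * GstHLSRenditionStream in the renditions list. */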
+
+/* Note: the #defines are to avoid symbol clashes with the legacy hlsdemux */
+
+#define gst_hls_master_playlist_new_from_data hls_master_playlist_new_from_data
+GstHLSMasterPlaylist * hls_master_playlist_new_from_data (gchar       * data,
+                                                         const gchar * base_uri);
+
+#define gst_hls_master_playlist_get_variant_for_bitrate hls_master_playlist_get_variant_for_bitrate
+GstHLSVariantStream *  hls_master_playlist_get_variant_for_bitrate (GstHLSMasterPlaylist * playlist,
+                                                                   GstHLSVariantStream  * current_variant,
+                                                                   guint                  bitrate,
+                                                                   guint                  min_bitrate);
+
+#define gst_hls_master_playlist_get_common_caps hls_master_playlist_get_common_caps
+GstCaps *              hls_master_playlist_get_common_caps (GstHLSMasterPlaylist *playlist);
+
+#define gst_hls_master_playlist_unref hls_master_playlist_unref
+void                   hls_master_playlist_unref (GstHLSMasterPlaylist * playlist);
+
+
+/* Time Mapping
+ *
+ * Used to map GStreamer times to internal segment timestamps
+ */
+struct _GstHLSTimeMap {
+  /* DISCONT SEQUENCE NUMBER */
+  gint64 dsn;
+
+  /* The stream time (used for gst timestamps, gst segments, seeking ...) */
+  GstClockTime stream_time;
+
+  /* The optional Program Date Time reference */
+  GDateTime *pdt;
+
+  /* The internal time (ex: mpeg-ts PTS) */
+  GstClockTime internal_time;
+};
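+
+/* A sketch of how the mapping could be applied (assumption for illustration;
+ * the actual helpers live in the demuxer code, not in this header):
+ *
+ *   static GstClockTime
+ *   internal_to_stream_time (GstHLSTimeMap * map, GstClockTime internal)
+ *   {
+ *     // offset the internal (e.g. MPEG-TS PTS) time by the anchor pair
+ *     return map->stream_time + (internal - map->internal_time);
+ *   }
+ */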
+
+GstStreamType          gst_stream_type_from_hls_type (GstHLSRenditionStreamType stype);
+GstStreamType          gst_hls_get_stream_type_from_structure (GstStructure *structure);
+GstStreamType          gst_hls_get_stream_type_from_caps (GstCaps *caps);
+
+#if !GLIB_CHECK_VERSION(2, 62, 0)
+static inline gchar* g_date_time_format_iso8601(GDateTime* datetime) {
+  GString* outstr = NULL;
+  gchar* main_date = NULL;
+  gint64 offset;
+
+  // Main date and time.
+  main_date = g_date_time_format(datetime, "%Y-%m-%dT%H:%M:%S");
+  outstr = g_string_new(main_date);
+  g_free(main_date);
+
+  // Timezone. Format it as `%:::z` unless the offset is zero, in which case
+  // we can simply use `Z`.
+  offset = g_date_time_get_utc_offset(datetime);
+
+  if (offset == 0) {
+    g_string_append_c(outstr, 'Z');
+  } else {
+    gchar* time_zone = g_date_time_format(datetime, "%:::z");
+    g_string_append(outstr, time_zone);
+    g_free(time_zone);
+  }
+
+  return g_string_free(outstr, FALSE);
+}
+#endif
+
+
+G_END_DECLS
+
+#endif /* __M3U8_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/meson.build b/subprojects/gst-plugins-good/ext/adaptivedemux2/hls/meson.build
new file mode 100644 (file)
index 0000000..c4cc3ad
--- /dev/null
@@ -0,0 +1,41 @@
+hls_sources = [
+  'hls/gsthlsdemux.c',
+  'hls/gsthlsdemux-util.c',
+  'hls/gsthlselement.c',
+  'hls/m3u8.c',
+]
+
+hls_cargs = []
+
+hls_crypto = get_option('hls-crypto')
+hls_crypto_dep = dependency('', required : false)
+
+if ['auto', 'nettle'].contains(hls_crypto)
+  hls_crypto_dep = dependency('nettle', version : '>= 3.0', required : false)
+  if hls_crypto_dep.found()
+    hls_cargs += ['-DHAVE_NETTLE']
+  endif
+endif
+
+if not hls_crypto_dep.found() and ['auto', 'libgcrypt'].contains(hls_crypto)
+  hls_crypto_dep = cc.find_library('gcrypt', required : false)
+  if hls_crypto_dep.found()
+    hls_cargs += ['-DHAVE_LIBGCRYPT']
+  endif
+endif
+
+if not hls_crypto_dep.found() and ['auto', 'openssl'].contains(hls_crypto)
+  hls_crypto_dep = dependency('openssl', required : false)
+  if hls_crypto_dep.found()
+    hls_cargs += ['-DHAVE_OPENSSL']
+  endif
+endif
+
+if not hls_crypto_dep.found()
+  if hls_crypto == 'auto'
+    message('Could not find a supported crypto library for HLS support')
+  else
+    error('HLS crypto support library "@0@" not found'.format(hls_crypto))
+  endif
+endif
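+
+# The crypto backend can also be forced at configure time (illustrative,
+# assuming the option is set on the gst-plugins-good subproject):
+#   meson setup builddir -Dgst-plugins-good:hls-crypto=openssl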
+hls_dep = declare_dependency(include_directories : include_directories('.'))
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/meson.build b/subprojects/gst-plugins-good/ext/adaptivedemux2/meson.build
new file mode 100644 (file)
index 0000000..61fc394
--- /dev/null
@@ -0,0 +1,153 @@
+dash_sources = [
+  'dash/gstdashdemux.c',
+  'dash/gstmpdnode.c',
+  'dash/gstmpdrootnode.c',
+  'dash/gstmpdbaseurlnode.c',
+  'dash/gstmpdutctimingnode.c',
+  'dash/gstmpdmetricsnode.c',
+  'dash/gstmpdmetricsrangenode.c',
+  'dash/gstmpdsnode.c',
+  'dash/gstmpdsegmenttimelinenode.c',
+  'dash/gstmpdsegmenttemplatenode.c',
+  'dash/gstmpdsegmenturlnode.c',
+  'dash/gstmpdsegmentlistnode.c',
+  'dash/gstmpdsegmentbasenode.c',
+  'dash/gstmpdperiodnode.c',
+  'dash/gstmpdrepresentationbasenode.c',
+  'dash/gstmpdmultsegmentbasenode.c',
+  'dash/gstmpdrepresentationnode.c',
+  'dash/gstmpdsubrepresentationnode.c',
+  'dash/gstmpdcontentcomponentnode.c',
+  'dash/gstmpdadaptationsetnode.c',
+  'dash/gstmpdsubsetnode.c',
+  'dash/gstmpdprograminformationnode.c',
+  'dash/gstmpdlocationnode.c',
+  'dash/gstmpdreportingnode.c',
+  'dash/gstmpdurltypenode.c',
+  'dash/gstmpddescriptortypenode.c',
+  'dash/gstxmlhelper.c',
+  'dash/gstmpdhelper.c',
+  'dash/gstmpdparser.c',
+  'dash/gstmpdclient.c'
+]
+
+smoothstreaming_sources = [
+  'mss/gstmssdemux.c',
+  'mss/gstmssmanifest.c',
+  'mss/gstmssfragmentparser.c',
+]
+
+plugin_sources = [
+  'plugin.c',
+  'gstisoff.c',
+  'gstadaptivedemux.c',
+  'gstadaptivedemuxutils.c',
+  'gstadaptivedemux-period.c',
+  'gstadaptivedemux-stream.c',
+  'gstadaptivedemux-track.c',
+  'downloadhelper.c',
+  'downloadrequest.c',
+  '../soup/gstsouploader.c'
+]
+
+# Used for unit tests, so need to be defined even if we skip the subdir
+hls_dep = dependency('', required : false)
+adaptivedemux2_dep = dependency('', required : false)
+
+adaptivedemux2_opt = get_option('adaptivedemux2')
+if adaptivedemux2_opt.disabled()
+  message('Not building adaptivedemux2 plugin because it was disabled')
+  subdir_done()
+endif
+
+adaptive_xml2_dep = dependency('libxml-2.0', version : '>= 2.8', allow_fallback: true, required: adaptivedemux2_opt)
+
+if not adaptive_xml2_dep.found()
+  message('Not building adaptivedemux2 plugin: libxml2 is needed')
+  subdir_done()
+endif
+
+subdir('hls')
+
+plugin_sources += dash_sources
+plugin_sources += smoothstreaming_sources
+plugin_sources += hls_sources
+
+soup_loader_args = ['-DBUILDING_ADAPTIVEDEMUX2']
+
+default_library = get_option('default_library')
+if default_library in ['static', 'both']
+  libsoup2_dep = dependency('libsoup-2.4', version : '>=2.48',
+                            required : false, fallback : ['libsoup', 'libsoup_dep'],
+                            default_options: ['sysprof=disabled'])
+  libsoup3_dep = dependency('libsoup-3.0', required : false,
+                            fallback : ['libsoup3', 'libsoup_dep'])
+
+  if libsoup3_dep.found()
+    soup_dep = libsoup3_dep
+    static_soup_loader_args = ['-DSTATIC_SOUP=3']
+  elif libsoup2_dep.found()
+    soup_dep = libsoup2_dep
+    static_soup_loader_args = ['-DSTATIC_SOUP=2']
+  else
+    if adaptivedemux2_opt.enabled()
+      error(f'adaptivedemux2: Either libsoup2 or libsoup3 is needed for build with default_library=@default_library@')
+    endif
+
+    message(f'Not building adaptivedemux2 plugin: either libsoup2 or libsoup3 is needed for build with default_library=@default_library@')
+    subdir_done()
+  endif
+
+  # Static plugin links to libsoup directly at build time
+  adaptivedemux2_static = static_library('gstadaptivedemux2',
+    plugin_sources,
+    include_directories: [configinc, libsinc],
+    c_args: [gst_plugins_good_args, soup_loader_args, static_soup_loader_args, hls_cargs,
+             '-DGST_ISOFF_API=G_GNUC_INTERNAL'],
+    dependencies: [gsttag_dep, gstnet_dep, gstbase_dep,
+                   gstpbutils_dep, gstapp_dep, soup_dep,
+                   gio_dep, adaptive_xml2_dep,
+                   hls_crypto_dep, libm],
+    install: true,
+    install_dir: plugins_install_dir)
+endif
+
+if default_library in ['shared', 'both']
+  # Shared plugin doesn't link to libsoup but dlopen()s it at runtime
+  libdl = cc.find_library('dl', required: false)
+
+  adaptivedemux2_shared = shared_library('gstadaptivedemux2',
+    plugin_sources,
+    include_directories: [configinc, libsinc],
+    c_args: [gst_plugins_good_args, soup_loader_args, hls_cargs,
+             '-DGST_ISOFF_API=G_GNUC_INTERNAL'],
+    dependencies: [gsttag_dep, gstnet_dep, gstbase_dep,
+                   gstpbutils_dep, gstapp_dep, gio_dep,
+                   gmodule_dep, adaptive_xml2_dep,
+                   hls_crypto_dep, libm, libdl],
+    install: true,
+    install_dir: plugins_install_dir)
+endif
+
+# Use the static library to generate the .pc file if it's available. The shared
+# library .pc file does not have a Requires: on libsoup, and we use plugin
+# .pc files to generate dependencies for linking plugins statically.
+if default_library == 'shared'
+  pkgconfig.generate(adaptivedemux2_shared, install_dir: plugins_pkgconfig_install_dir)
+else
+  pkgconfig.generate(adaptivedemux2_static, install_dir: plugins_pkgconfig_install_dir)
+endif
+
+# Add the shared library to the plugins list if available. We pass this list of
+# plugins to hotdoc to generate the plugins cache, which introspects the plugin
+# by loading it. We need the shared plugin for that.
+if default_library == 'static'
+  plugins += [adaptivedemux2_static]
+else
+  plugins += [adaptivedemux2_shared]
+endif
+
+# For unit tests
+adaptivedemux2_dep = declare_dependency(
+  include_directories : include_directories('.'),
+  dependencies: adaptive_xml2_dep)
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.c
new file mode 100644 (file)
index 0000000..ca790ba
--- /dev/null
@@ -0,0 +1,671 @@
+/* GStreamer
+ * Copyright (C) 2012 Smart TV Alliance
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * gstmssdemux.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-mssdemux
+ * @title: mssdemux
+ *
+ * Demuxes a Microsoft Smooth Streaming manifest into its audio and/or video streams.
+ *
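+ * An illustrative launch line (the URI is a placeholder, not a real stream):
+ *
+ * |[
+ * gst-launch-1.0 playbin3 uri="http://example.com/SmoothStreaming/stream.ism/Manifest"
+ * ]|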
+ */
+
+/*
+ * == Internals
+ *
+ * = Smooth streaming in a few lines
+ * An SS stream is defined by an XML manifest file. This file has a list of
+ * tracks (StreamIndex); each one can have multiple QualityLevels that define
+ * different encodings/bitrates. When playing a track, only one of those
+ * QualityLevels can be active at a time (per stream).
+ *
+ * The StreamIndex defines a URL with {time} and {bitrate} tags that are
+ * replaced by values indicated by the fragment start times and the selected
+ * QualityLevel, that generates the fragments URLs.
+ *
+ * Another relevant detail is that the Isomedia fragments for smoothstreaming
+ * don't contain a 'moov' atom or a 'stsd', so there is no information
+ * about the media type/configuration on the fragments; it must be extracted
+ * from the Manifest and passed downstream. mssdemux does this via GstCaps.
+ *
+ * = How mssdemux works
+ * The gstmssmanifest.c utility holds the manifest, parses it and provides
+ * functions to extract information from it. mssdemux receives the
+ * manifest from its sink pad and starts processing it when it gets EOS.
+ *
+ * The Manifest is parsed and the streams are exposed, one pad for each, with
+ * an initially selected QualityLevel. Each stream starts its own GstTask that
+ * is responsible for downloading fragments and pushing them downstream.
+ *
+ * When a new connection-speed is set, mssdemux evaluates the available
+ * QualityLevels and might decide to switch to another one. In this case it
+ * pushes a new GstCaps event indicating the new caps on the pads.
+ *
+ * All operations that intend to update the GstTasks state should be protected
+ * with the GST_OBJECT_LOCK.
+ */
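+
+/* Illustrative example of the URL templating described above (the exact
+ * template string comes from the manifest; this one is only an assumption):
+ *
+ *   template : "QualityLevels({bitrate})/Fragments(video={start time})"
+ *   bitrate  : 2000000, fragment start time : 0
+ *   fragment : "QualityLevels(2000000)/Fragments(video=0)"
+ *
+ * The resulting path is then appended to the base URL derived from the
+ * Manifest URI (see gst_mss_demux_stream_update_fragment_info()).
+ */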
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "gstmssdemux.h"
+
+GST_DEBUG_CATEGORY (mssdemux2_debug);
+
+static GstStaticPadTemplate gst_mss_demux_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/vnd.ms-sstr+xml")
+    );
+
+static GstStaticPadTemplate gst_mss_demux_videosrc_template =
+GST_STATIC_PAD_TEMPLATE ("video_%02u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_mss_demux_audiosrc_template =
+GST_STATIC_PAD_TEMPLATE ("audio_%02u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+typedef struct _GstMssDemux2 GstMssDemux2;
+typedef struct _GstMssDemux2Class GstMssDemux2Class;
+
+#define gst_mss_demux2_parent_class parent_class
+G_DEFINE_TYPE (GstMssDemux2, gst_mss_demux2, GST_TYPE_ADAPTIVE_DEMUX);
+
+#define gst_hls_demux_stream_parent_class stream_parent_class
+G_DEFINE_TYPE (GstMssDemuxStream, gst_mss_demux_stream,
+    GST_TYPE_ADAPTIVE_DEMUX2_STREAM);
+
+GST_ELEMENT_REGISTER_DEFINE (mssdemux2, "mssdemux2",
+    GST_RANK_PRIMARY + 1, GST_TYPE_MSS_DEMUX2);
+
+static void gst_mss_demux_dispose (GObject * object);
+
+static gboolean gst_mss_demux_is_live (GstAdaptiveDemux * demux);
+static gboolean gst_mss_demux_process_manifest (GstAdaptiveDemux * demux,
+    GstBuffer * buffer);
+static GstClockTime gst_mss_demux_get_duration (GstAdaptiveDemux * demux);
+static void gst_mss_demux_reset (GstAdaptiveDemux * demux);
+static GstFlowReturn gst_mss_demux_stream_seek (GstAdaptiveDemux2Stream *
+    stream, gboolean forward, GstSeekFlags flags, GstClockTimeDiff ts,
+    GstClockTimeDiff * final_ts);
+static gboolean gst_mss_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream
+    * stream);
+static GstFlowReturn
+gst_mss_demux_stream_advance_fragment (GstAdaptiveDemux2Stream * stream);
+static gboolean gst_mss_demux_stream_select_bitrate (GstAdaptiveDemux2Stream *
+    stream, guint64 bitrate);
+static GstFlowReturn
+gst_mss_demux_stream_update_fragment_info (GstAdaptiveDemux2Stream * stream);
+static gboolean gst_mss_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek);
+static gint64
+gst_mss_demux_get_manifest_update_interval (GstAdaptiveDemux * demux);
+static GstClockTime
+gst_mss_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux2Stream *
+    stream);
+static GstFlowReturn
+gst_mss_demux_update_manifest_data (GstAdaptiveDemux * demux,
+    GstBuffer * buffer);
+static gboolean gst_mss_demux_get_live_seek_range (GstAdaptiveDemux * demux,
+    gint64 * start, gint64 * stop);
+static GstFlowReturn gst_mss_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer);
+static gboolean
+gst_mss_demux_requires_periodical_playlist_update (GstAdaptiveDemux * demux);
+GstStreamType gst_stream_type_from_mss_type (GstMssStreamType mtype);
+
+static void
+gst_mss_demux_stream_class_init (GstMssDemuxStreamClass * klass)
+{
+}
+
+static void
+gst_mss_demux_stream_init (GstMssDemuxStream * stream)
+{
+}
+
+static void
+gst_mss_demux2_class_init (GstMssDemuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstAdaptiveDemuxClass *gstadaptivedemux_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstadaptivedemux_class = (GstAdaptiveDemuxClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_mss_demux_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_mss_demux_videosrc_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_mss_demux_audiosrc_template);
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Smooth Streaming demuxer (v2)", "Codec/Demuxer/Adaptive",
+      "Parse and demultiplex a Smooth Streaming manifest into audio and video "
+      "streams", "Thiago Santos <thiago.sousa.santos@collabora.com>");
+
+  gobject_class->dispose = gst_mss_demux_dispose;
+
+  gstadaptivedemux_class->process_manifest = gst_mss_demux_process_manifest;
+  gstadaptivedemux_class->is_live = gst_mss_demux_is_live;
+  gstadaptivedemux_class->get_duration = gst_mss_demux_get_duration;
+  gstadaptivedemux_class->get_manifest_update_interval =
+      gst_mss_demux_get_manifest_update_interval;
+  gstadaptivedemux_class->reset = gst_mss_demux_reset;
+  gstadaptivedemux_class->seek = gst_mss_demux_seek;
+  gstadaptivedemux_class->stream_seek = gst_mss_demux_stream_seek;
+  gstadaptivedemux_class->stream_advance_fragment =
+      gst_mss_demux_stream_advance_fragment;
+  gstadaptivedemux_class->stream_has_next_fragment =
+      gst_mss_demux_stream_has_next_fragment;
+  gstadaptivedemux_class->stream_select_bitrate =
+      gst_mss_demux_stream_select_bitrate;
+  gstadaptivedemux_class->stream_update_fragment_info =
+      gst_mss_demux_stream_update_fragment_info;
+  gstadaptivedemux_class->stream_get_fragment_waiting_time =
+      gst_mss_demux_stream_get_fragment_waiting_time;
+  gstadaptivedemux_class->update_manifest_data =
+      gst_mss_demux_update_manifest_data;
+  gstadaptivedemux_class->get_live_seek_range =
+      gst_mss_demux_get_live_seek_range;
+  gstadaptivedemux_class->data_received = gst_mss_demux_data_received;
+  gstadaptivedemux_class->requires_periodical_playlist_update =
+      gst_mss_demux_requires_periodical_playlist_update;
+
+  GST_DEBUG_CATEGORY_INIT (mssdemux2_debug, "mssdemux2", 0,
+      "mssdemux2 element");
+}
+
+static void
+gst_mss_demux2_init (GstMssDemux * mssdemux)
+{
+}
+
+static void
+gst_mss_demux_reset (GstAdaptiveDemux * demux)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  if (mssdemux->manifest) {
+    gst_mss_manifest_free (mssdemux->manifest);
+    mssdemux->manifest = NULL;
+  }
+  g_free (mssdemux->base_url);
+  mssdemux->base_url = NULL;
+}
+
+static void
+gst_mss_demux_dispose (GObject * object)
+{
+  gst_mss_demux_reset (GST_ADAPTIVE_DEMUX_CAST (object));
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static gboolean
+gst_mss_demux_is_live (GstAdaptiveDemux * demux)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  g_return_val_if_fail (mssdemux->manifest != NULL, FALSE);
+
+  return gst_mss_manifest_is_live (mssdemux->manifest);
+}
+
+static GstClockTime
+gst_mss_demux_get_duration (GstAdaptiveDemux * demux)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  g_return_val_if_fail (mssdemux->manifest != NULL, FALSE);
+
+  return gst_mss_manifest_get_gst_duration (mssdemux->manifest);
+}
+
+static GstFlowReturn
+gst_mss_demux_stream_update_fragment_info (GstAdaptiveDemux2Stream * stream)
+{
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (stream->demux);
+  GstFlowReturn ret;
+  gchar *path = NULL;
+
+  gst_adaptive_demux2_stream_fragment_clear (&stream->fragment);
+  ret = gst_mss_stream_get_fragment_url (mssstream->manifest_stream, &path);
+
+  if (ret == GST_FLOW_OK) {
+    stream->fragment.uri = g_strdup_printf ("%s/%s", mssdemux->base_url, path);
+    stream->fragment.stream_time =
+        gst_mss_stream_get_fragment_gst_timestamp (mssstream->manifest_stream);
+    stream->fragment.duration =
+        gst_mss_stream_get_fragment_gst_duration (mssstream->manifest_stream);
+  }
+  g_free (path);
+
+  return ret;
+}
+
+static GstFlowReturn
+gst_mss_demux_stream_seek (GstAdaptiveDemux2Stream * stream, gboolean forward,
+    GstSeekFlags flags, GstClockTimeDiff ts, GstClockTimeDiff * final_ts)
+{
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+
+  gst_mss_stream_seek (mssstream->manifest_stream, forward, flags, ts,
+      final_ts);
+  return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_mss_demux_stream_advance_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+
+  if (stream->demux->segment.rate >= 0)
+    return gst_mss_stream_advance_fragment (mssstream->manifest_stream);
+  else
+    return gst_mss_stream_regress_fragment (mssstream->manifest_stream);
+}
+
+static GstCaps *
+create_mss_caps (GstMssDemuxStream * stream, GstCaps * caps)
+{
+  return gst_caps_new_simple ("video/quicktime", "variant", G_TYPE_STRING,
+      "mss-fragmented", "timescale", G_TYPE_UINT64,
+      gst_mss_stream_get_timescale (stream->manifest_stream), "media-caps",
+      GST_TYPE_CAPS, caps, NULL);
+}
+
+static void
+gst_mss_demux_apply_protection_system (GstCaps * caps,
+    const gchar * selected_system)
+{
+  GstStructure *s;
+
+  g_return_if_fail (selected_system);
+  s = gst_caps_get_structure (caps, 0);
+  gst_structure_set (s,
+      "original-media-type", G_TYPE_STRING, gst_structure_get_name (s),
+      GST_PROTECTION_SYSTEM_ID_CAPS_FIELD, G_TYPE_STRING, selected_system,
+      NULL);
+  gst_structure_set_name (s, "application/x-cenc");
+
+}
+
+static gboolean
+gst_mss_demux_setup_streams (GstAdaptiveDemux * demux)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+  GSList *streams = gst_mss_manifest_get_streams (mssdemux->manifest);
+  GSList *active_streams = NULL;
+  GSList *iter;
+  const gchar *protection_system_id =
+      gst_mss_manifest_get_protection_system_id (mssdemux->manifest);
+  const gchar *protection_data =
+      gst_mss_manifest_get_protection_data (mssdemux->manifest);
+  gboolean protected = protection_system_id && protection_data;
+  const gchar *selected_system = NULL;
+  guint64 max_bitrate = G_MAXUINT64;
+
+  if (streams == NULL) {
+    GST_INFO_OBJECT (mssdemux, "No streams found in the manifest");
+    GST_ELEMENT_ERROR (mssdemux, STREAM, DEMUX,
+        (_("This file contains no playable streams.")),
+        ("no streams found at the Manifest"));
+    return FALSE;
+  }
+
+  if (protected) {
+    const gchar *sys_ids[2] = { protection_system_id, NULL };
+
+    selected_system = gst_protection_select_system (sys_ids);
+    if (!selected_system) {
+      GST_ERROR_OBJECT (mssdemux, "stream is protected, but no "
+          "suitable decryptor element has been found");
+      return FALSE;
+    }
+  }
+
+  if (demux->connection_speed != 0)
+    max_bitrate = demux->connection_speed;
+
+  for (iter = streams; iter; iter = g_slist_next (iter)) {
+    GstAdaptiveDemux2Stream *stream = NULL;
+    GstMssDemuxStream *mss_stream;
+    GstMssStream *manifeststream = iter->data;
+    GstAdaptiveDemuxTrack *track;
+    GstStreamType stream_type =
+        gst_stream_type_from_mss_type (gst_mss_stream_get_type
+        (manifeststream));
+    const gchar *lang, *stream_id = gst_stream_type_get_name (stream_type);
+    gchar *name;
+    GstCaps *caps;
+    GstTagList *tags = NULL;
+
+    name = g_strdup_printf ("mss-stream-%s", stream_id);
+    mss_stream = g_object_new (GST_TYPE_MSS_DEMUX_STREAM, "name", name, NULL);
+    g_free (name);
+
+    stream = GST_ADAPTIVE_DEMUX2_STREAM_CAST (mss_stream);
+    stream->stream_type = stream_type;
+
+    mss_stream->manifest_stream = manifeststream;
+    gst_mss_stream_set_active (manifeststream, TRUE);
+
+    /* Set the maximum bitrate now that the underlying stream is active. This
+     * ensures that we get the proper caps and information. */
+    gst_mss_stream_select_bitrate (manifeststream, max_bitrate);
+
+    caps = gst_mss_stream_get_caps (mss_stream->manifest_stream);
+    gst_adaptive_demux2_stream_set_caps (stream, create_mss_caps (mss_stream,
+            caps));
+    lang = gst_mss_stream_get_lang (mss_stream->manifest_stream);
+    if (lang != NULL)
+      tags = gst_tag_list_new (GST_TAG_LANGUAGE_CODE, lang, NULL);
+
+    track = gst_adaptive_demux_track_new (demux, stream_type,
+        GST_STREAM_FLAG_NONE, (gchar *) stream_id, create_mss_caps (mss_stream,
+            caps), tags);
+
+    gst_adaptive_demux2_add_stream (demux, stream);
+    gst_adaptive_demux2_stream_add_track (stream, track);
+    gst_adaptive_demux_track_unref (track);
+
+    GST_DEBUG_OBJECT (stream, "Current quality bitrate %" G_GUINT64_FORMAT,
+        gst_mss_stream_get_current_bitrate (manifeststream));
+
+    if (tags)
+      gst_adaptive_demux2_stream_set_tags (stream, tags);
+
+    active_streams = g_slist_prepend (active_streams, mss_stream);
+  }
+
+  for (iter = active_streams; iter; iter = g_slist_next (iter)) {
+    GstMssDemuxStream *stream = iter->data;
+
+    if (protected) {
+      GstBuffer *protection_buffer =
+          gst_buffer_new_wrapped (g_strdup (protection_data),
+          strlen (protection_data));
+      GstEvent *event =
+          gst_event_new_protection (protection_system_id, protection_buffer,
+          "smooth-streaming");
+
+      GST_LOG_OBJECT (stream, "Queueing Protection event on source pad");
+      gst_adaptive_demux2_stream_queue_event ((GstAdaptiveDemux2Stream *)
+          stream, event);
+      gst_buffer_unref (protection_buffer);
+    }
+  }
+
+  g_slist_free (active_streams);
+  return TRUE;
+}
+
+static void
+gst_mss_demux_update_base_url (GstMssDemux * mssdemux)
+{
+  GstAdaptiveDemux *demux = GST_ADAPTIVE_DEMUX_CAST (mssdemux);
+  gchar *baseurl_end;
+
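+  /* Derive the base URL used for fragment requests by stripping the trailing
+   * "/Manifest" (or "/manifest") from the manifest URI */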
+  g_free (mssdemux->base_url);
+
+  mssdemux->base_url =
+      g_strdup (demux->manifest_base_uri ? demux->manifest_base_uri : demux->
+      manifest_uri);
+  baseurl_end = g_strrstr (mssdemux->base_url, "/Manifest");
+  if (baseurl_end == NULL) {
+    /* second try */
+    baseurl_end = g_strrstr (mssdemux->base_url, "/manifest");
+  }
+  if (baseurl_end) {
+    /* set the new end of the string */
+    baseurl_end[0] = '\0';
+  } else {
+    GST_WARNING_OBJECT (mssdemux, "Stream's URI didn't end with /manifest");
+  }
+}
+
+static gboolean
+gst_mss_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  gst_mss_demux_update_base_url (mssdemux);
+
+  mssdemux->manifest = gst_mss_manifest_new (buf);
+  if (!mssdemux->manifest) {
+    GST_ELEMENT_ERROR (mssdemux, STREAM, FORMAT, ("Bad manifest file"),
+        ("Xml manifest file couldn't be parsed"));
+    return FALSE;
+  }
+  return gst_mss_demux_setup_streams (demux);
+}
+
+static gboolean
+gst_mss_demux_stream_select_bitrate (GstAdaptiveDemux2Stream * stream,
+    guint64 bitrate)
+{
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+  gboolean ret = FALSE;
+
+  GST_DEBUG_OBJECT (stream,
+      "Using stream download bitrate %" G_GUINT64_FORMAT, bitrate);
+
+  if (gst_mss_stream_select_bitrate (mssstream->manifest_stream,
+          bitrate / MAX (1.0, ABS (stream->demux->segment.rate)))) {
+    GstCaps *caps;
+    GstCaps *msscaps;
+    GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (stream->demux);
+    const gchar *protection_system_id =
+        gst_mss_manifest_get_protection_system_id (mssdemux->manifest);
+    const gchar *protection_data =
+        gst_mss_manifest_get_protection_data (mssdemux->manifest);
+    gboolean protected = protection_system_id && protection_data;
+
+    caps = gst_mss_stream_get_caps (mssstream->manifest_stream);
+
+    GST_DEBUG_OBJECT (stream,
+        "Starting streams reconfiguration due to bitrate changes");
+
+    if (protected) {
+      const gchar *sys_ids[2] = { protection_system_id, NULL };
+      const gchar *selected_system = gst_protection_select_system (sys_ids);
+
+      if (!selected_system) {
+        GST_ERROR_OBJECT (mssdemux, "stream is protected, but no "
+            "suitable decryptor element has been found");
+        gst_caps_unref (caps);
+        return FALSE;
+      }
+
+      gst_mss_demux_apply_protection_system (caps, selected_system);
+    }
+
+    msscaps = create_mss_caps (mssstream, caps);
+
+    GST_DEBUG_OBJECT (stream,
+        "Stream changed bitrate to %" G_GUINT64_FORMAT " caps: %"
+        GST_PTR_FORMAT,
+        gst_mss_stream_get_current_bitrate (mssstream->manifest_stream), caps);
+
+    gst_caps_unref (caps);
+
+    gst_adaptive_demux2_stream_set_caps (stream, msscaps);
+    ret = TRUE;
+    GST_DEBUG_OBJECT (stream, "Finished streams reconfiguration");
+  }
+  return ret;
+}
+
+#define SEEK_UPDATES_PLAY_POSITION(r, start_type, stop_type) \
+  ((r >= 0 && start_type != GST_SEEK_TYPE_NONE) || \
+   (r < 0 && stop_type != GST_SEEK_TYPE_NONE))
+
+static gboolean
+gst_mss_demux_seek (GstAdaptiveDemux * demux, GstEvent * seek)
+{
+  gdouble rate;
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType start_type, stop_type;
+  gint64 start, stop;
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  gst_event_parse_seek (seek, &rate, &format, &flags, &start_type, &start,
+      &stop_type, &stop);
+
+  GST_DEBUG_OBJECT (mssdemux,
+      "seek event, rate: %f start: %" GST_TIME_FORMAT " stop: %"
+      GST_TIME_FORMAT, rate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+  if (SEEK_UPDATES_PLAY_POSITION (rate, start_type, stop_type)) {
+    if (rate >= 0)
+      gst_mss_manifest_seek (mssdemux->manifest, rate >= 0, start);
+    else
+      gst_mss_manifest_seek (mssdemux->manifest, rate >= 0, stop);
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_mss_demux_stream_has_next_fragment (GstAdaptiveDemux2Stream * stream)
+{
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+
+  return gst_mss_stream_has_next_fragment (mssstream->manifest_stream);
+}
+
+static gint64
+gst_mss_demux_get_manifest_update_interval (GstAdaptiveDemux * demux)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+  GstClockTime interval;
+
+  /* The MSS spec doesn't say much about this. The fragments seem to carry
+   * a UUID box with the time and duration of the next fragments, so in
+   * theory the Manifest wouldn't need to be fetched again, but we still
+   * need a fallback here. Use twice the current fragment duration. */
+
+  interval = gst_mss_manifest_get_min_fragment_duration (mssdemux->manifest);
+  if (!GST_CLOCK_TIME_IS_VALID (interval))
+    interval = 2 * GST_SECOND;  /* default to 2 seconds */
+
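+  /* Return twice the fragment duration, converted from nanoseconds to
+   * microseconds */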
+  interval = 2 * (interval / GST_USECOND);
+
+  return interval;
+}
+
+static GstClockTime
+gst_mss_demux_stream_get_fragment_waiting_time (GstAdaptiveDemux2Stream *
+    stream)
+{
+  /* Wait a second for live streams so we don't attempt to download fragments prematurely */
+  return GST_SECOND;
+}
+
+static GstFlowReturn
+gst_mss_demux_update_manifest_data (GstAdaptiveDemux * demux,
+    GstBuffer * buffer)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  gst_mss_demux_update_base_url (mssdemux);
+
+  gst_mss_manifest_reload_fragments (mssdemux->manifest, buffer);
+  return GST_FLOW_OK;
+}
+
+static gboolean
+gst_mss_demux_get_live_seek_range (GstAdaptiveDemux * demux, gint64 * start,
+    gint64 * stop)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+
+  return gst_mss_manifest_get_live_seek_range (mssdemux->manifest, start, stop);
+}
+
+static GstFlowReturn
+gst_mss_demux_data_received (GstAdaptiveDemux * demux,
+    GstAdaptiveDemux2Stream * stream, GstBuffer * buffer)
+{
+  GstMssDemux *mssdemux = GST_MSS_DEMUX_CAST (demux);
+  GstMssDemuxStream *mssstream = (GstMssDemuxStream *) stream;
+  gsize available;
+
+  if (!gst_mss_manifest_is_live (mssdemux->manifest)) {
+    return gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+  }
+
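+  /* For live streams, accumulate data until the fragment header (moof with
+   * tfxd/tfrf boxes) can be parsed, since it carries timing information for
+   * the upcoming fragments */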
+  if (gst_mss_stream_fragment_parsing_needed (mssstream->manifest_stream)) {
+    gst_mss_manifest_live_adapter_push (mssstream->manifest_stream, buffer);
+    available =
+        gst_mss_manifest_live_adapter_available (mssstream->manifest_stream);
+    // FIXME: try to reduce this minimal size.
+    if (available < 4096) {
+      return GST_FLOW_OK;
+    } else {
+      GST_LOG_OBJECT (stream, "enough data, parsing fragment.");
+      buffer =
+          gst_mss_manifest_live_adapter_take_buffer (mssstream->manifest_stream,
+          available);
+      gst_mss_stream_parse_fragment (mssstream->manifest_stream, buffer);
+    }
+  }
+
+  return gst_adaptive_demux2_stream_push_buffer (stream, buffer);
+}
+
+static gboolean
+gst_mss_demux_requires_periodical_playlist_update (GstAdaptiveDemux * demux)
+{
+  return TRUE;
+}
+
+GstStreamType
+gst_stream_type_from_mss_type (GstMssStreamType mtype)
+{
+  switch (mtype) {
+    case MSS_STREAM_TYPE_AUDIO:
+      return GST_STREAM_TYPE_AUDIO;
+    case MSS_STREAM_TYPE_VIDEO:
+      return GST_STREAM_TYPE_VIDEO;
+    default:
+      return GST_STREAM_TYPE_UNKNOWN;
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssdemux.h
new file mode 100644 (file)
index 0000000..2d12119
--- /dev/null
@@ -0,0 +1,86 @@
+/* GStreamer
+ * Copyright (C) 2012 Smart TV Alliance
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * gstmssdemux.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MSSDEMUX_H__
+#define __GST_MSSDEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstdataqueue.h>
+#include <gst/gstprotection.h>
+#include "gstmssmanifest.h"
+#include "../gstadaptivedemux.h"
+
+G_BEGIN_DECLS
+
+GST_DEBUG_CATEGORY_EXTERN (mssdemux2_debug);
+#define GST_CAT_DEFAULT mssdemux2_debug
+
+#define GST_TYPE_MSS_DEMUX2 \
+  (gst_mss_demux2_get_type())
+#define GST_MSS_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MSS_DEMUX2,GstMssDemux))
+#define GST_MSS_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MSS_DEMUX2,GstMssDemuxClass))
+#define GST_IS_MSS_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MSS_DEMUX2))
+#define GST_IS_MSS_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MSS_DEMUX2))
+
+#define GST_MSS_DEMUX_CAST(obj) ((GstMssDemux *)(obj))
+
+#define GST_TYPE_MSS_DEMUX_STREAM \
+  (gst_mss_demux_stream_get_type())
+#define GST_MSS_DEMUX_STREAM(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MSS_DEMUX_STREAM,GstMssDemuxStream))
+#define GST_MSS_DEMUX_STREAM_CAST(obj) ((GstMssDemuxStream *)obj)
+
+typedef struct _GstMssDemuxStream GstMssDemuxStream;
+typedef GstAdaptiveDemux2StreamClass GstMssDemuxStreamClass;
+
+typedef struct _GstMssDemux2 GstMssDemux;
+typedef struct _GstMssDemux2Class GstMssDemuxClass;
+
+struct _GstMssDemuxStream {
+  GstAdaptiveDemux2Stream parent;
+
+  GstMssStream *manifest_stream;
+};
+
+struct _GstMssDemux2 {
+  GstAdaptiveDemux bin;
+
+  GstMssManifest *manifest;
+  gchar *base_url;
+};
+
+struct _GstMssDemux2Class {
+  GstAdaptiveDemuxClass parent_class;
+};
+
+GType gst_mss_demux2_get_type (void);
+GType gst_mss_demux_stream_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (mssdemux2);
+G_END_DECLS
+
+#endif /* __GST_MSSDEMUX_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.c
new file mode 100644 (file)
index 0000000..b3c8e48
--- /dev/null
@@ -0,0 +1,122 @@
+/*
+ * Microsoft Smooth-Streaming fragment parsing library
+ *
+ * gstmssfragmentparser.h
+ *
+ * Copyright (C) 2016 Igalia S.L
+ * Copyright (C) 2016 Metrological
+ *   Author: Philippe Normand <philn@igalia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "gstmssfragmentparser.h"
+#include <gst/base/gstbytereader.h>
+#include <string.h>
+
+GST_DEBUG_CATEGORY_EXTERN (mssdemux2_debug);
+#define GST_CAT_DEFAULT mssdemux2_debug
+
+void
+gst_mss_fragment_parser_init (GstMssFragmentParser * parser)
+{
+  parser->status = GST_MSS_FRAGMENT_HEADER_PARSER_INIT;
+}
+
+void
+gst_mss_fragment_parser_clear (GstMssFragmentParser * parser)
+{
+  if (parser->moof)
+    gst_isoff_moof_box_free (parser->moof);
+  parser->moof = NULL;
+  parser->current_fourcc = 0;
+}
+
+gboolean
+gst_mss_fragment_parser_add_buffer (GstMssFragmentParser * parser,
+    GstBuffer * buffer)
+{
+  GstByteReader reader;
+  GstMapInfo info;
+  guint64 size;
+  guint32 fourcc;
+  guint header_size;
+  gboolean error = FALSE;
+
+  if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
+    return FALSE;
+  }
+
+  gst_byte_reader_init (&reader, info.data, info.size);
+  GST_TRACE ("Total buffer size: %u", gst_byte_reader_get_size (&reader));
+
+  do {
+    parser->current_fourcc = 0;
+
+    if (!gst_isoff_parse_box_header (&reader, &fourcc, NULL, &header_size,
+            &size)) {
+      break;
+    }
+
+    parser->current_fourcc = fourcc;
+
+    GST_LOG ("box %" GST_FOURCC_FORMAT " size %" G_GUINT64_FORMAT,
+        GST_FOURCC_ARGS (fourcc), size);
+
+    if (parser->current_fourcc == GST_ISOFF_FOURCC_MOOF) {
+      GstByteReader sub_reader;
+
+      g_assert (parser->moof == NULL);
+      gst_byte_reader_get_sub_reader (&reader, &sub_reader, size - header_size);
+      parser->moof = gst_isoff_moof_box_parse (&sub_reader);
+      if (parser->moof == NULL) {
+        GST_ERROR ("Failed to parse moof");
+        error = TRUE;
+      }
+    } else if (parser->current_fourcc == GST_ISOFF_FOURCC_MDAT) {
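+      /* Stop scanning at the mdat: any header boxes we need precede the
+       * media payload */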
+      goto beach;
+    } else {
+      gst_byte_reader_skip (&reader, size - header_size);
+    }
+  } while (gst_byte_reader_get_remaining (&reader) > 0);
+
+beach:
+
+  /* Do sanity check */
+  if (parser->current_fourcc != GST_ISOFF_FOURCC_MDAT || !parser->moof ||
+      parser->moof->traf->len == 0)
+    error = TRUE;
+
+  if (!error) {
+    GstTrafBox *traf = &g_array_index (parser->moof->traf, GstTrafBox, 0);
+    if (!traf->tfxd) {
+      GST_ERROR ("no tfxd box");
+      error = TRUE;
+    } else if (!traf->tfrf) {
+      GST_ERROR ("no tfrf box");
+      error = TRUE;
+    }
+  }
+
+  if (!error)
+    parser->status = GST_MSS_FRAGMENT_HEADER_PARSER_FINISHED;
+
+  GST_LOG ("Fragment parsing successful: %s", error ? "no" : "yes");
+  gst_buffer_unmap (buffer, &info);
+  return !error;
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssfragmentparser.h
new file mode 100644 (file)
index 0000000..2a53f88
--- /dev/null
@@ -0,0 +1,53 @@
+/*
+ * Microsoft Smooth-Streaming fragment parsing library
+ *
+ * gstmssfragmentparser.h
+ *
+ * Copyright (C) 2016 Igalia S.L
+ * Copyright (C) 2016 Metrological
+ *   Author: Philippe Normand <philn@igalia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MSS_FRAGMENT_PARSER_H__
+#define __GST_MSS_FRAGMENT_PARSER_H__
+
+#include <gst/gst.h>
+#include "../gstisoff.h"
+
+G_BEGIN_DECLS
+
+typedef enum _GstFragmentHeaderParserStatus
+{
+  GST_MSS_FRAGMENT_HEADER_PARSER_INIT,
+  GST_MSS_FRAGMENT_HEADER_PARSER_FINISHED
+} GstFragmentHeaderParserStatus;
+
+typedef struct _GstMssFragmentParser
+{
+  GstFragmentHeaderParserStatus status;
+  GstMoofBox *moof;
+  guint32 current_fourcc;
+} GstMssFragmentParser;
+
+void gst_mss_fragment_parser_init (GstMssFragmentParser * parser);
+void gst_mss_fragment_parser_clear (GstMssFragmentParser * parser);
+gboolean gst_mss_fragment_parser_add_buffer (GstMssFragmentParser * parser, GstBuffer * buf);
+
+G_END_DECLS
+
+#endif /* __GST_MSS_FRAGMENT_PARSER_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.c
new file mode 100644 (file)
index 0000000..514033a
--- /dev/null
@@ -0,0 +1,1650 @@
+/* GStreamer
+ * Copyright (C) 2012 Smart TV Alliance
+ * Copyright (C) 2016 Igalia S.L
+ * Copyright (C) 2016 Metrological
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * gstmssmanifest.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <glib.h>
+#include <string.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <libxml/parser.h>
+#include <libxml/tree.h>
+
+#include "gstmssmanifest.h"
+#include "gstmssfragmentparser.h"
+
+GST_DEBUG_CATEGORY_EXTERN (mssdemux2_debug);
+#define GST_CAT_DEFAULT mssdemux2_debug
+
+#define DEFAULT_TIMESCALE             10000000
+
+#define MSS_NODE_STREAM_FRAGMENT      "c"
+#define MSS_NODE_STREAM_QUALITY       "QualityLevel"
+
+#define MSS_PROP_BITRATE              "Bitrate"
+#define MSS_PROP_DURATION             "d"
+#define MSS_PROP_DVR_WINDOW_LENGTH    "DVRWindowLength"
+#define MSS_PROP_LANGUAGE             "Language"
+#define MSS_PROP_NUMBER               "n"
+#define MSS_PROP_REPETITIONS          "r"
+#define MSS_PROP_STREAM_DURATION      "Duration"
+#define MSS_PROP_TIME                 "t"
+#define MSS_PROP_TIMESCALE            "TimeScale"
+#define MSS_PROP_URL                  "Url"
+
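+/* Number of fragments to stay behind the most recent one when starting
+ * playback of a live stream */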
+#define GST_MSSMANIFEST_LIVE_MIN_FRAGMENT_DISTANCE 3
+
+typedef struct _GstMssStreamFragment
+{
+  guint number;
+  guint64 time;
+  guint64 duration;
+  guint repetitions;
+} GstMssStreamFragment;
+
+typedef struct _GstMssStreamQuality
+{
+  xmlNodePtr xmlnode;
+
+  gchar *bitrate_str;
+  guint64 bitrate;
+} GstMssStreamQuality;
+
+struct _GstMssStream
+{
+  xmlNodePtr xmlnode;
+
+  gboolean active;              /* if the stream is currently being used */
+  gint selectedQualityIndex;
+
+  gboolean has_live_fragments;
+  GstAdapter *live_adapter;
+
+  GList *fragments;
+  GList *qualities;
+
+  gchar *url;
+  gchar *lang;
+
+  GstMssFragmentParser fragment_parser;
+
+  guint fragment_repetition_index;
+  GList *current_fragment;
+  GList *current_quality;
+
+  /* TODO move this to somewhere static */
+  GRegex *regex_bitrate;
+  GRegex *regex_position;
+};
+
+struct _GstMssManifest
+{
+  xmlDocPtr xml;
+  xmlNodePtr xmlrootnode;
+
+  gboolean is_live;
+  gint64 dvr_window;
+  guint64 look_ahead_fragment_count;
+
+  GString *protection_system_id;
+  gchar *protection_data;
+
+  GSList *streams;
+};
+
+/* For parsing and building a fragments list */
+typedef struct _GstMssFragmentListBuilder
+{
+  GList *fragments;
+
+  GstMssStreamFragment *previous_fragment;
+  guint fragment_number;
+  guint64 fragment_time_accum;
+} GstMssFragmentListBuilder;
+
+static void
+gst_mss_fragment_list_builder_init (GstMssFragmentListBuilder * builder)
+{
+  builder->fragments = NULL;
+  builder->previous_fragment = NULL;
+  builder->fragment_time_accum = 0;
+  builder->fragment_number = 0;
+}
+
+static void
+gst_mss_fragment_list_builder_add (GstMssFragmentListBuilder * builder,
+    xmlNodePtr node)
+{
+  gchar *duration_str;
+  gchar *time_str;
+  gchar *seqnum_str;
+  gchar *repetition_str;
+  GstMssStreamFragment *fragment = g_new (GstMssStreamFragment, 1);
+
+  duration_str = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_DURATION);
+  time_str = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_TIME);
+  seqnum_str = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_NUMBER);
+  repetition_str =
+      (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_REPETITIONS);
+
+  /* use the node's seq number or use the previous + 1 */
+  if (seqnum_str) {
+    fragment->number = g_ascii_strtoull (seqnum_str, NULL, 10);
+    xmlFree (seqnum_str);
+    builder->fragment_number = fragment->number;
+  } else {
+    fragment->number = builder->fragment_number;
+  }
+  builder->fragment_number = fragment->number + 1;
+
+  if (repetition_str) {
+    fragment->repetitions = g_ascii_strtoull (repetition_str, NULL, 10);
+    xmlFree (repetition_str);
+  } else {
+    fragment->repetitions = 1;
+  }
+
+  if (time_str) {
+    fragment->time = g_ascii_strtoull (time_str, NULL, 10);
+
+    xmlFree (time_str);
+    builder->fragment_time_accum = fragment->time;
+  } else {
+    fragment->time = builder->fragment_time_accum;
+  }
+
+  /* if we have a previous fragment, it means we still need to set its duration */
+  if (builder->previous_fragment)
+    builder->previous_fragment->duration =
+        (fragment->time -
+        builder->previous_fragment->time) /
+        builder->previous_fragment->repetitions;
+
+  if (duration_str) {
+    fragment->duration = g_ascii_strtoull (duration_str, NULL, 10);
+
+    builder->previous_fragment = NULL;
+    builder->fragment_time_accum += fragment->duration * fragment->repetitions;
+    xmlFree (duration_str);
+  } else {
+    /* store to set the duration at the next iteration */
+    builder->previous_fragment = fragment;
+  }
+
+  /* we reverse it later */
+  builder->fragments = g_list_prepend (builder->fragments, fragment);
+  GST_LOG ("Adding fragment number: %u, time: %" G_GUINT64_FORMAT
+      ", duration: %" G_GUINT64_FORMAT ", repetitions: %u",
+      fragment->number, fragment->time, fragment->duration,
+      fragment->repetitions);
+}
+
+static GstBuffer *gst_buffer_from_hex_string (const gchar * s);
+
+static gboolean
+node_has_type (xmlNodePtr node, const gchar * name)
+{
+  return strcmp ((gchar *) node->name, name) == 0;
+}
+
+static GstMssStreamQuality *
+gst_mss_stream_quality_new (xmlNodePtr node)
+{
+  GstMssStreamQuality *q = g_slice_new (GstMssStreamQuality);
+
+  q->xmlnode = node;
+  q->bitrate_str = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_BITRATE);
+
+  if (q->bitrate_str != NULL)
+    q->bitrate = g_ascii_strtoull (q->bitrate_str, NULL, 10);
+  else
+    q->bitrate = 0;
+
+  return q;
+}
+
+static void
+gst_mss_stream_quality_free (GstMssStreamQuality * quality)
+{
+  g_return_if_fail (quality != NULL);
+
+  xmlFree (quality->bitrate_str);
+  g_slice_free (GstMssStreamQuality, quality);
+}
+
+static gint
+compare_bitrate (GstMssStreamQuality * a, GstMssStreamQuality * b)
+{
+  if (a->bitrate > b->bitrate)
+    return 1;
+  if (a->bitrate < b->bitrate)
+    return -1;
+  return 0;
+}
+
+static void
+_gst_mss_stream_init (GstMssManifest * manifest, GstMssStream * stream,
+    xmlNodePtr node)
+{
+  xmlNodePtr iter;
+  GstMssFragmentListBuilder builder;
+
+  gst_mss_fragment_list_builder_init (&builder);
+
+  stream->xmlnode = node;
+
+  /* get the base url path generator */
+  stream->url = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_URL);
+  stream->lang = (gchar *) xmlGetProp (node, (xmlChar *) MSS_PROP_LANGUAGE);
+
+  /* for live playback each fragment usually carries timing
+   * information for the next few look-ahead fragments, so the
+   * playlist can be built incrementally starting from the first
+   * fragment of the manifest.
+   */
+
+  GST_DEBUG ("Live stream: %s, look-ahead fragments: %" G_GUINT64_FORMAT,
+      manifest->is_live ? "yes" : "no", manifest->look_ahead_fragment_count);
+  stream->has_live_fragments = manifest->is_live
+      && manifest->look_ahead_fragment_count;
+
+  for (iter = node->children; iter; iter = iter->next) {
+    if (node_has_type (iter, MSS_NODE_STREAM_FRAGMENT)) {
+      gst_mss_fragment_list_builder_add (&builder, iter);
+    } else if (node_has_type (iter, MSS_NODE_STREAM_QUALITY)) {
+      GstMssStreamQuality *quality = gst_mss_stream_quality_new (iter);
+      stream->qualities = g_list_prepend (stream->qualities, quality);
+    } else {
+      /* TODO gst log this */
+    }
+  }
+
+  if (stream->has_live_fragments) {
+    stream->live_adapter = gst_adapter_new ();
+  }
+
+  if (builder.fragments) {
+    stream->fragments = g_list_reverse (builder.fragments);
+    if (manifest->is_live) {
+      GList *iter = g_list_last (stream->fragments);
+      gint i;
+
+      for (i = 0; i < GST_MSSMANIFEST_LIVE_MIN_FRAGMENT_DISTANCE; i++) {
+        if (g_list_previous (iter)) {
+          iter = g_list_previous (iter);
+        } else {
+          break;
+        }
+      }
+      stream->current_fragment = iter;
+    } else {
+      stream->current_fragment = stream->fragments;
+    }
+  }
+
+  /* order them from lowest to highest bitrate */
+  stream->qualities =
+      g_list_sort (stream->qualities, (GCompareFunc) compare_bitrate);
+  stream->current_quality = stream->qualities;
+
+  stream->regex_bitrate = g_regex_new ("\\{[Bb]itrate\\}", 0, 0, NULL);
+  stream->regex_position = g_regex_new ("\\{start[ _]time\\}", 0, 0, NULL);
+
+  gst_mss_fragment_parser_init (&stream->fragment_parser);
+}
+
+
+static void
+_gst_mss_parse_protection (GstMssManifest * manifest,
+    xmlNodePtr protection_node)
+{
+  xmlNodePtr nodeiter;
+
+  for (nodeiter = protection_node->children; nodeiter;
+      nodeiter = nodeiter->next) {
+    if (nodeiter->type == XML_ELEMENT_NODE
+        && (strcmp ((const char *) nodeiter->name, "ProtectionHeader") == 0)) {
+      xmlChar *system_id_attribute =
+          xmlGetProp (nodeiter, (xmlChar *) "SystemID");
+      gchar *value = (gchar *) system_id_attribute;
+      int id_len = strlen (value);
+      GString *system_id;
+
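+      /* Strip the surrounding braces and lower-case the SystemID so it is in
+       * the plain UUID form used with gst_protection_select_system() */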
+      if (value[0] == '{') {
+        value++;
+        id_len--;
+      }
+
+      system_id = g_string_new (value);
+      system_id = g_string_ascii_down (system_id);
+      if (value[id_len - 1] == '}')
+        system_id = g_string_truncate (system_id, id_len - 1);
+
+      manifest->protection_system_id = system_id;
+      manifest->protection_data = (gchar *) xmlNodeGetContent (nodeiter);
+      xmlFree (system_id_attribute);
+      break;
+    }
+  }
+}
+
+GstMssManifest *
+gst_mss_manifest_new (GstBuffer * data)
+{
+  GstMssManifest *manifest;
+  xmlNodePtr root;
+  xmlNodePtr nodeiter;
+  gchar *live_str;
+  GstMapInfo mapinfo;
+  gchar *look_ahead_fragment_count_str;
+
+  if (!gst_buffer_map (data, &mapinfo, GST_MAP_READ)) {
+    return NULL;
+  }
+
+  manifest = g_malloc0 (sizeof (GstMssManifest));
+
+  manifest->xml = xmlReadMemory ((const gchar *) mapinfo.data,
+      mapinfo.size, "manifest", NULL, 0);
+  root = manifest->xmlrootnode = xmlDocGetRootElement (manifest->xml);
+  if (root == NULL) {
+    GST_WARNING ("No root node ! Invalid manifest");
+    gst_mss_manifest_free (manifest);
+    return NULL;
+  }
+
+  live_str = (gchar *) xmlGetProp (root, (xmlChar *) "IsLive");
+  if (live_str) {
+    manifest->is_live = g_ascii_strcasecmp (live_str, "true") == 0;
+    xmlFree (live_str);
+  }
+
+  /* the entire file is always available for non-live streams */
+  if (!manifest->is_live) {
+    manifest->dvr_window = 0;
+    manifest->look_ahead_fragment_count = 0;
+  } else {
+    /* if 0, or non-existent, the length is infinite */
+    gchar *dvr_window_str = (gchar *) xmlGetProp (root,
+        (xmlChar *) MSS_PROP_DVR_WINDOW_LENGTH);
+    if (dvr_window_str) {
+      manifest->dvr_window = g_ascii_strtoull (dvr_window_str, NULL, 10);
+      xmlFree (dvr_window_str);
+      if (manifest->dvr_window <= 0) {
+        manifest->dvr_window = 0;
+      }
+    }
+
+    look_ahead_fragment_count_str =
+        (gchar *) xmlGetProp (root, (xmlChar *) "LookAheadFragmentCount");
+    if (look_ahead_fragment_count_str) {
+      manifest->look_ahead_fragment_count =
+          g_ascii_strtoull (look_ahead_fragment_count_str, NULL, 10);
+      xmlFree (look_ahead_fragment_count_str);
+      if (manifest->look_ahead_fragment_count <= 0) {
+        manifest->look_ahead_fragment_count = 0;
+      }
+    }
+  }
+
+  for (nodeiter = root->children; nodeiter; nodeiter = nodeiter->next) {
+    if (nodeiter->type == XML_ELEMENT_NODE
+        && (strcmp ((const char *) nodeiter->name, "StreamIndex") == 0)) {
+      GstMssStream *stream = g_new0 (GstMssStream, 1);
+
+      manifest->streams = g_slist_append (manifest->streams, stream);
+      _gst_mss_stream_init (manifest, stream, nodeiter);
+    }
+
+    if (nodeiter->type == XML_ELEMENT_NODE
+        && (strcmp ((const char *) nodeiter->name, "Protection") == 0)) {
+      _gst_mss_parse_protection (manifest, nodeiter);
+    }
+  }
+
+  gst_buffer_unmap (data, &mapinfo);
+
+  return manifest;
+}
+
+static void
+gst_mss_stream_free (GstMssStream * stream)
+{
+  if (stream->live_adapter) {
+    gst_adapter_clear (stream->live_adapter);
+    g_object_unref (stream->live_adapter);
+  }
+
+  g_list_free_full (stream->fragments, g_free);
+  g_list_free_full (stream->qualities,
+      (GDestroyNotify) gst_mss_stream_quality_free);
+  xmlFree (stream->url);
+  xmlFree (stream->lang);
+  g_regex_unref (stream->regex_position);
+  g_regex_unref (stream->regex_bitrate);
+  gst_mss_fragment_parser_clear (&stream->fragment_parser);
+  g_free (stream);
+}
+
+void
+gst_mss_manifest_free (GstMssManifest * manifest)
+{
+  g_return_if_fail (manifest != NULL);
+
+  g_slist_free_full (manifest->streams, (GDestroyNotify) gst_mss_stream_free);
+
+  if (manifest->protection_system_id != NULL)
+    g_string_free (manifest->protection_system_id, TRUE);
+  xmlFree (manifest->protection_data);
+
+  xmlFreeDoc (manifest->xml);
+  g_free (manifest);
+}
+
+const gchar *
+gst_mss_manifest_get_protection_system_id (GstMssManifest * manifest)
+{
+  if (manifest->protection_system_id != NULL)
+    return manifest->protection_system_id->str;
+  return NULL;
+}
+
+const gchar *
+gst_mss_manifest_get_protection_data (GstMssManifest * manifest)
+{
+  return manifest->protection_data;
+}
+
+GSList *
+gst_mss_manifest_get_streams (GstMssManifest * manifest)
+{
+  return manifest->streams;
+}
+
+GstMssStreamType
+gst_mss_stream_get_type (GstMssStream * stream)
+{
+  gchar *prop = (gchar *) xmlGetProp (stream->xmlnode, (xmlChar *) "Type");
+  GstMssStreamType ret = MSS_STREAM_TYPE_UNKNOWN;
+
+  if (prop == NULL)
+    return MSS_STREAM_TYPE_UNKNOWN;
+
+  if (strcmp (prop, "video") == 0) {
+    ret = MSS_STREAM_TYPE_VIDEO;
+  } else if (strcmp (prop, "audio") == 0) {
+    ret = MSS_STREAM_TYPE_AUDIO;
+  } else {
+    GST_DEBUG ("Unsupported stream type: %s", prop);
+  }
+  xmlFree (prop);
+  return ret;
+}
+
+static GstCaps *
+_gst_mss_stream_video_caps_from_fourcc (gchar * fourcc)
+{
+  if (!fourcc)
+    return NULL;
+
+  if (strcmp (fourcc, "H264") == 0 || strcmp (fourcc, "AVC1") == 0) {
+    return gst_caps_new_simple ("video/x-h264", "stream-format", G_TYPE_STRING,
+        "avc", NULL);
+  } else if (strcmp (fourcc, "WVC1") == 0) {
+    return gst_caps_new_simple ("video/x-wmv", "wmvversion", G_TYPE_INT, 3,
+        "format", G_TYPE_STRING, "WVC1", NULL);
+  }
+  return NULL;
+}
+
+static GstCaps *
+_gst_mss_stream_audio_caps_from_fourcc (gchar * fourcc)
+{
+  if (!fourcc)
+    return NULL;
+
+  if (strcmp (fourcc, "AACL") == 0) {
+    return gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 4,
+        NULL);
+  } else if (strcmp (fourcc, "WmaPro") == 0 || strcmp (fourcc, "WMAP") == 0) {
+    return gst_caps_new_simple ("audio/x-wma", "wmaversion", G_TYPE_INT, 3,
+        NULL);
+  }
+  return NULL;
+}
+
+static GstCaps *
+_gst_mss_stream_audio_caps_from_audio_tag (gint audiotag)
+{
+  switch (audiotag) {
+    case 83:                   /* MP3 */
+      return gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 1,
+          "layer", G_TYPE_INT, 3, NULL);
+    case 255:                  /* AAC */
+      return gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 4,
+          NULL);
+    default:
+      break;
+  }
+  return NULL;
+}
+
+/* copied and adapted from h264parse */
+static GstBuffer *
+_make_h264_codec_data (GstBuffer * sps, GstBuffer * pps)
+{
+  GstBuffer *buf;
+  gint sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
+  guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
+  guint8 *data;
+  gint nl;
+  GstMapInfo spsinfo, ppsinfo, codecdatainfo;
+
+  if (gst_buffer_get_size (sps) < 4)
+    return NULL;
+
+  gst_buffer_map (sps, &spsinfo, GST_MAP_READ);
+  gst_buffer_map (pps, &ppsinfo, GST_MAP_READ);
+
+  sps_size += spsinfo.size + 2;
+  profile_idc = spsinfo.data[1];
+  profile_comp = spsinfo.data[2];
+  level_idc = spsinfo.data[3];
+  num_sps = 1;
+
+  pps_size += ppsinfo.size + 2;
+  num_pps = 1;
+
+  buf = gst_buffer_new_and_alloc (5 + 1 + sps_size + 1 + pps_size);
+  gst_buffer_map (buf, &codecdatainfo, GST_MAP_WRITE);
+  data = codecdatainfo.data;
+  nl = 4;
+
+  data[0] = 1;                  /* AVC Decoder Configuration Record ver. 1 */
+  data[1] = profile_idc;        /* profile_idc                             */
+  data[2] = profile_comp;       /* profile_compability                     */
+  data[3] = level_idc;          /* level_idc                               */
+  data[4] = 0xfc | (nl - 1);    /* nal_length_size_minus1                  */
+  data[5] = 0xe0 | num_sps;     /* number of SPSs */
+
+  data += 6;
+  GST_WRITE_UINT16_BE (data, spsinfo.size);
+  memcpy (data + 2, spsinfo.data, spsinfo.size);
+  data += 2 + spsinfo.size;
+
+  data[0] = num_pps;
+  data++;
+  GST_WRITE_UINT16_BE (data, ppsinfo.size);
+  memcpy (data + 2, ppsinfo.data, ppsinfo.size);
+  data += 2 + ppsinfo.size;
+
+  gst_buffer_unmap (sps, &spsinfo);
+  gst_buffer_unmap (pps, &ppsinfo);
+  gst_buffer_unmap (buf, &codecdatainfo);
+
+  return buf;
+}
+
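+/* The H264 CodecPrivateData carries the SPS and PPS as a hex string of
+ * Annex-B style NALs separated by 00000001 start codes; split them and
+ * repack them as "avc" codec_data */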
+static void
+_gst_mss_stream_add_h264_codec_data (GstCaps * caps, const gchar * codecdatastr)
+{
+  GstBuffer *sps;
+  GstBuffer *pps;
+  GstBuffer *buffer;
+  gchar *sps_str;
+  gchar *pps_str;
+
+  /* search for the sps start */
+  if (g_str_has_prefix (codecdatastr, "00000001")) {
+    sps_str = (gchar *) codecdatastr + 8;
+  } else {
+    return;                     /* invalid mss codec data */
+  }
+
+  /* search for the pps start */
+  pps_str = g_strstr_len (sps_str, -1, "00000001");
+  if (!pps_str) {
+    return;                     /* invalid mss codec data */
+  }
+
+  pps_str[0] = '\0';
+  sps = gst_buffer_from_hex_string (sps_str);
+  pps_str[0] = '0';
+
+  pps_str = pps_str + 8;
+  pps = gst_buffer_from_hex_string (pps_str);
+
+  buffer = _make_h264_codec_data (sps, pps);
+  gst_buffer_unref (sps);
+  gst_buffer_unref (pps);
+
+  if (buffer != NULL) {
+    gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buffer, NULL);
+    gst_buffer_unref (buffer);
+  }
+}
+
+static GstCaps *
+_gst_mss_stream_video_caps_from_qualitylevel_xml (GstMssStreamQuality * q)
+{
+  xmlNodePtr node = q->xmlnode;
+  GstCaps *caps;
+  GstStructure *structure;
+  gchar *fourcc = (gchar *) xmlGetProp (node, (xmlChar *) "FourCC");
+  gchar *max_width = (gchar *) xmlGetProp (node, (xmlChar *) "MaxWidth");
+  gchar *max_height = (gchar *) xmlGetProp (node, (xmlChar *) "MaxHeight");
+  gchar *codec_data =
+      (gchar *) xmlGetProp (node, (xmlChar *) "CodecPrivateData");
+
+  if (!max_width)
+    max_width = (gchar *) xmlGetProp (node, (xmlChar *) "Width");
+  if (!max_height)
+    max_height = (gchar *) xmlGetProp (node, (xmlChar *) "Height");
+
+  caps = _gst_mss_stream_video_caps_from_fourcc (fourcc);
+  if (!caps)
+    goto end;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (max_width) {
+    gst_structure_set (structure, "width", G_TYPE_INT,
+        (int) g_ascii_strtoull (max_width, NULL, 10), NULL);
+  }
+  if (max_height) {
+    gst_structure_set (structure, "height", G_TYPE_INT,
+        (int) g_ascii_strtoull (max_height, NULL, 10), NULL);
+  }
+
+  if (codec_data && strlen (codec_data)) {
+    if (strcmp (fourcc, "H264") == 0 || strcmp (fourcc, "AVC1") == 0) {
+      _gst_mss_stream_add_h264_codec_data (caps, codec_data);
+    } else {
+      GstBuffer *buffer = gst_buffer_from_hex_string ((gchar *) codec_data);
+      gst_structure_set (structure, "codec_data", GST_TYPE_BUFFER, buffer,
+          NULL);
+      gst_buffer_unref (buffer);
+    }
+  }
+
+end:
+  xmlFree (fourcc);
+  xmlFree (max_width);
+  xmlFree (max_height);
+  xmlFree (codec_data);
+
+  return caps;
+}
+
+static guint8
+_frequency_index_from_sampling_rate (guint sampling_rate)
+{
+  static const guint aac_sample_rates[] = { 96000, 88200, 64000, 48000, 44100,
+    32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350
+  };
+
+  guint8 i;
+
+  for (i = 0; i < G_N_ELEMENTS (aac_sample_rates); i++) {
+    if (aac_sample_rates[i] == sampling_rate)
+      return i;
+  }
+  return 15;
+}
+
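+/* Build a minimal AudioSpecificConfig (codec_data) for AAC-LC from the
+ * sampling rate and channel count; 3 extra bytes are needed when the rate
+ * is not in the standard frequency table */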
+static GstBuffer *
+_make_aacl_codec_data (guint64 sampling_rate, guint64 channels)
+{
+  GstBuffer *buf;
+  guint8 *data;
+  guint8 frequency_index;
+  guint8 buf_size;
+  GstMapInfo info;
+
+  buf_size = 2;
+  frequency_index = _frequency_index_from_sampling_rate (sampling_rate);
+  if (frequency_index == 15)
+    buf_size += 3;
+
+  buf = gst_buffer_new_and_alloc (buf_size);
+  gst_buffer_map (buf, &info, GST_MAP_WRITE);
+  data = info.data;
+
+  data[0] = 2 << 3;             /* AAC-LC object type is 2 */
+  data[0] += frequency_index >> 1;
+  data[1] = (frequency_index & 0x01) << 7;
+
+  /* Sampling rate is not in frequencies table, write manually */
+  if (frequency_index == 15) {
+    data[1] += sampling_rate >> 17;
+    data[2] = (sampling_rate >> 9) & 0xFF;
+    data[3] = (sampling_rate >> 1) & 0xFF;
+    data[4] = (sampling_rate & 0x01) << 7;      /* LSB of the rate goes in the top bit */
+    data += 3;
+  }
+
+  data[1] += (channels & 0x0F) << 3;
+
+  gst_buffer_unmap (buf, &info);
+
+  return buf;
+}
+
+static GstCaps *
+_gst_mss_stream_audio_caps_from_qualitylevel_xml (GstMssStreamQuality * q)
+{
+  xmlNodePtr node = q->xmlnode;
+  GstCaps *caps = NULL;
+  GstStructure *structure;
+  gchar *fourcc = (gchar *) xmlGetProp (node, (xmlChar *) "FourCC");
+  gchar *audiotag = (gchar *) xmlGetProp (node, (xmlChar *) "AudioTag");
+  gchar *channels_str = (gchar *) xmlGetProp (node, (xmlChar *) "Channels");
+  gchar *rate_str = (gchar *) xmlGetProp (node, (xmlChar *) "SamplingRate");
+  gchar *depth_str = (gchar *) xmlGetProp (node, (xmlChar *) "BitsPerSample");
+  gchar *block_align_str =
+      (gchar *) xmlGetProp (node, (xmlChar *) "PacketSize");
+  gchar *codec_data_str =
+      (gchar *) xmlGetProp (node, (xmlChar *) "CodecPrivateData");
+  GstBuffer *codec_data = NULL;
+  gint depth = 0;
+  gint block_align = 0;
+  gint rate = 0;
+  gint channels = 0;
+  gint atag = 0;
+
+  if (!fourcc)                  /* sometimes the fourcc is omitted, we fallback to the Subtype in the StreamIndex node */
+    fourcc = (gchar *) xmlGetProp (node->parent, (xmlChar *) "Subtype");
+
+  if (fourcc) {
+    caps = _gst_mss_stream_audio_caps_from_fourcc (fourcc);
+  } else if (audiotag) {
+    atag = g_ascii_strtoull (audiotag, NULL, 10);
+    caps = _gst_mss_stream_audio_caps_from_audio_tag (atag);
+  }
+
+  if (!caps)
+    goto end;
+
+  structure = gst_caps_get_structure (caps, 0);
+  if (codec_data_str && strlen (codec_data_str)) {
+    codec_data = gst_buffer_from_hex_string ((gchar *) codec_data_str);
+  }
+
+  if (rate_str)
+    rate = (gint) g_ascii_strtoull (rate_str, NULL, 10);
+  if (channels_str)
+    channels = (int) g_ascii_strtoull (channels_str, NULL, 10);
+  if (depth_str)
+    depth = (gint) g_ascii_strtoull (depth_str, NULL, 10);
+  if (block_align_str)
+    block_align = (int) g_ascii_strtoull (block_align_str, NULL, 10);
+
+  if (!codec_data) {
+    gint codec_data_len;
+    codec_data_str = (gchar *) xmlGetProp (node, (xmlChar *) "WaveFormatEx");
+
+    if (codec_data_str != NULL) {
+      codec_data_len = strlen (codec_data_str) / 2;
+
+      /* a WAVEFORMATEX structure is 18 bytes */
+      if (codec_data_len >= 18) {
+        GstMapInfo mapinfo;
+        codec_data = gst_buffer_from_hex_string ((gchar *) codec_data_str);
+
+        /* since this is a WAVEFORMATEX, try to get the block_align and rate */
+        gst_buffer_map (codec_data, &mapinfo, GST_MAP_READ);
+        if (!channels_str) {
+          channels = GST_READ_UINT16_LE (mapinfo.data + 2);
+        }
+        if (!rate_str) {
+          rate = GST_READ_UINT32_LE (mapinfo.data + 4);
+        }
+        if (!block_align) {
+          block_align = GST_READ_UINT16_LE (mapinfo.data + 12);
+        }
+        if (!depth) {
+          depth = GST_READ_UINT16_LE (mapinfo.data + 14);
+        }
+        gst_buffer_unmap (codec_data, &mapinfo);
+
+        /* Consume all the WAVEFORMATEX structure, and pass only the rest of
+         * the data as the codec private data */
+        gst_buffer_resize (codec_data, 18, -1);
+      } else {
+        GST_WARNING ("Dropping WaveFormatEx: data is %d bytes, "
+            "but at least 18 bytes are expected", codec_data_len);
+      }
+    }
+  }
+
+  if (!codec_data && ((fourcc && strcmp (fourcc, "AACL") == 0) || atag == 255)
+      && rate && channels) {
+    codec_data = _make_aacl_codec_data (rate, channels);
+  }
+
+  if (block_align)
+    gst_structure_set (structure, "block_align", G_TYPE_INT, block_align, NULL);
+
+  if (channels)
+    gst_structure_set (structure, "channels", G_TYPE_INT, channels, NULL);
+
+  if (rate)
+    gst_structure_set (structure, "rate", G_TYPE_INT, rate, NULL);
+
+  if (depth)
+    gst_structure_set (structure, "depth", G_TYPE_INT, depth, NULL);
+
+  if (q->bitrate)
+    gst_structure_set (structure, "bitrate", G_TYPE_INT, (int) q->bitrate,
+        NULL);
+
+  if (codec_data)
+    gst_structure_set (structure, "codec_data", GST_TYPE_BUFFER, codec_data,
+        NULL);
+
+end:
+  if (codec_data)
+    gst_buffer_unref (codec_data);
+  xmlFree (fourcc);
+  xmlFree (audiotag);
+  xmlFree (channels_str);
+  xmlFree (rate_str);
+  xmlFree (depth_str);
+  xmlFree (block_align_str);
+  xmlFree (codec_data_str);
+
+  return caps;
+}
+
+void
+gst_mss_stream_set_active (GstMssStream * stream, gboolean active)
+{
+  stream->active = active;
+}
+
+guint64
+gst_mss_stream_get_timescale (GstMssStream * stream)
+{
+  gchar *timescale;
+  guint64 ts = DEFAULT_TIMESCALE;
+
+  timescale =
+      (gchar *) xmlGetProp (stream->xmlnode, (xmlChar *) MSS_PROP_TIMESCALE);
+  if (!timescale) {
+    timescale =
+        (gchar *) xmlGetProp (stream->xmlnode->parent,
+        (xmlChar *) MSS_PROP_TIMESCALE);
+  }
+
+  if (timescale) {
+    ts = g_ascii_strtoull (timescale, NULL, 10);
+    xmlFree (timescale);
+  }
+  return ts;
+}
+
+guint64
+gst_mss_manifest_get_timescale (GstMssManifest * manifest)
+{
+  gchar *timescale;
+  guint64 ts = DEFAULT_TIMESCALE;
+
+  timescale =
+      (gchar *) xmlGetProp (manifest->xmlrootnode,
+      (xmlChar *) MSS_PROP_TIMESCALE);
+  if (timescale) {
+    ts = g_ascii_strtoull (timescale, NULL, 10);
+    xmlFree (timescale);
+  }
+  return ts;
+}
+
+guint64
+gst_mss_manifest_get_duration (GstMssManifest * manifest)
+{
+  gchar *duration;
+  guint64 dur = 0;
+
+  /* try the property */
+  duration =
+      (gchar *) xmlGetProp (manifest->xmlrootnode,
+      (xmlChar *) MSS_PROP_STREAM_DURATION);
+  if (duration) {
+    dur = g_ascii_strtoull (duration, NULL, 10);
+    xmlFree (duration);
+  }
+  /* else use the fragment list */
+  if (dur <= 0) {
+    guint64 max_dur = 0;
+    GSList *iter;
+
+    for (iter = manifest->streams; iter; iter = g_slist_next (iter)) {
+      GstMssStream *stream = iter->data;
+
+      if (stream->active) {
+        if (stream->fragments) {
+          GList *l = g_list_last (stream->fragments);
+          GstMssStreamFragment *fragment = (GstMssStreamFragment *) l->data;
+          guint64 frag_dur =
+              fragment->time + fragment->duration * fragment->repetitions;
+          max_dur = MAX (frag_dur, max_dur);
+        }
+      }
+    }
+
+    if (max_dur != 0)
+      dur = max_dur;
+  }
+
+  return dur;
+}
+
+
+/*
+ * Gets the duration in nanoseconds
+ */
+GstClockTime
+gst_mss_manifest_get_gst_duration (GstMssManifest * manifest)
+{
+  guint64 duration = -1;
+  guint64 timescale;
+  GstClockTime gstdur = GST_CLOCK_TIME_NONE;
+
+  duration = gst_mss_manifest_get_duration (manifest);
+  timescale = gst_mss_manifest_get_timescale (manifest);
+
+  if (duration != -1 && timescale != -1)
+    gstdur =
+        (GstClockTime) gst_util_uint64_scale_round (duration, GST_SECOND,
+        timescale);
+
+  return gstdur;
+}
+
+GstClockTime
+gst_mss_manifest_get_min_fragment_duration (GstMssManifest * manifest)
+{
+  GSList *iter;
+  GstClockTime dur = GST_CLOCK_TIME_NONE;
+  GstClockTime iter_dur;
+
+  for (iter = manifest->streams; iter; iter = g_slist_next (iter)) {
+    GstMssStream *stream = iter->data;
+
+    iter_dur = gst_mss_stream_get_fragment_gst_duration (stream);
+    if (iter_dur != GST_CLOCK_TIME_NONE && iter_dur != 0) {
+      if (GST_CLOCK_TIME_IS_VALID (dur)) {
+        dur = MIN (dur, iter_dur);
+      } else {
+        dur = iter_dur;
+      }
+    }
+  }
+
+  return dur;
+}
+
+GstCaps *
+gst_mss_stream_get_caps (GstMssStream * stream)
+{
+  GstMssStreamType streamtype = gst_mss_stream_get_type (stream);
+  GstMssStreamQuality *qualitylevel = stream->current_quality->data;
+  GstCaps *caps = NULL;
+
+  if (streamtype == MSS_STREAM_TYPE_VIDEO)
+    caps = _gst_mss_stream_video_caps_from_qualitylevel_xml (qualitylevel);
+  else if (streamtype == MSS_STREAM_TYPE_AUDIO)
+    caps = _gst_mss_stream_audio_caps_from_qualitylevel_xml (qualitylevel);
+
+  return caps;
+}
+
+GstFlowReturn
+gst_mss_stream_get_fragment_url (GstMssStream * stream, gchar ** url)
+{
+  gchar *tmp;
+  gchar *start_time_str;
+  guint64 time;
+  GstMssStreamFragment *fragment;
+  GstMssStreamQuality *quality = stream->current_quality->data;
+
+  g_return_val_if_fail (stream->active, GST_FLOW_ERROR);
+
+  if (stream->current_fragment == NULL) /* stream is over */
+    return GST_FLOW_EOS;
+
+  fragment = stream->current_fragment->data;
+
+  time =
+      fragment->time + fragment->duration * stream->fragment_repetition_index;
+  start_time_str = g_strdup_printf ("%" G_GUINT64_FORMAT, time);
+
+  tmp = g_regex_replace_literal (stream->regex_bitrate, stream->url,
+      strlen (stream->url), 0, quality->bitrate_str, 0, NULL);
+  *url = g_regex_replace_literal (stream->regex_position, tmp,
+      strlen (tmp), 0, start_time_str, 0, NULL);
+
+  g_free (tmp);
+  g_free (start_time_str);
+
+  if (*url == NULL)
+    return GST_FLOW_ERROR;
+
+  return GST_FLOW_OK;
+}
+
+GstClockTime
+gst_mss_stream_get_fragment_gst_timestamp (GstMssStream * stream)
+{
+  guint64 time;
+  guint64 timescale;
+  GstMssStreamFragment *fragment;
+
+  g_return_val_if_fail (stream->active, GST_CLOCK_TIME_NONE);
+
+  if (!stream->current_fragment) {
+    GList *last = g_list_last (stream->fragments);
+    if (last == NULL)
+      return GST_CLOCK_TIME_NONE;
+
+    fragment = last->data;
+    time = fragment->time + (fragment->duration * fragment->repetitions);
+  } else {
+    fragment = stream->current_fragment->data;
+    time =
+        fragment->time +
+        (fragment->duration * stream->fragment_repetition_index);
+  }
+
+  timescale = gst_mss_stream_get_timescale (stream);
+  return (GstClockTime) gst_util_uint64_scale_round (time, GST_SECOND,
+      timescale);
+}
+
+GstClockTime
+gst_mss_stream_get_fragment_gst_duration (GstMssStream * stream)
+{
+  guint64 dur;
+  guint64 timescale;
+  GstMssStreamFragment *fragment;
+
+  g_return_val_if_fail (stream->active, GST_CLOCK_TIME_NONE);
+
+  if (!stream->current_fragment)
+    return GST_CLOCK_TIME_NONE;
+
+  fragment = stream->current_fragment->data;
+
+  dur = fragment->duration;
+  timescale = gst_mss_stream_get_timescale (stream);
+  return (GstClockTime) gst_util_uint64_scale_round (dur, GST_SECOND,
+      timescale);
+}
+
+gboolean
+gst_mss_stream_has_next_fragment (GstMssStream * stream)
+{
+  g_return_val_if_fail (stream->active, FALSE);
+
+  if (stream->current_fragment == NULL)
+    return FALSE;
+
+  return TRUE;
+}
+
+GstFlowReturn
+gst_mss_stream_advance_fragment (GstMssStream * stream)
+{
+  GstMssStreamFragment *fragment;
+  const gchar *stream_type_name =
+      gst_mss_stream_type_name (gst_mss_stream_get_type (stream));
+
+  g_return_val_if_fail (stream->active, GST_FLOW_ERROR);
+
+  if (stream->current_fragment == NULL)
+    return GST_FLOW_EOS;
+
+  fragment = stream->current_fragment->data;
+  stream->fragment_repetition_index++;
+  if (stream->fragment_repetition_index < fragment->repetitions)
+    goto beach;
+
+  stream->fragment_repetition_index = 0;
+  stream->current_fragment = g_list_next (stream->current_fragment);
+
+  GST_DEBUG ("Advanced to fragment #%d on %s stream", fragment->number,
+      stream_type_name);
+  if (stream->current_fragment == NULL)
+    return GST_FLOW_EOS;
+
+beach:
+  gst_mss_fragment_parser_clear (&stream->fragment_parser);
+  gst_mss_fragment_parser_init (&stream->fragment_parser);
+  return GST_FLOW_OK;
+}
+
+GstFlowReturn
+gst_mss_stream_regress_fragment (GstMssStream * stream)
+{
+  GstMssStreamFragment *fragment;
+  g_return_val_if_fail (stream->active, GST_FLOW_ERROR);
+
+  if (stream->current_fragment == NULL)
+    return GST_FLOW_EOS;
+
+  fragment = stream->current_fragment->data;
+  if (stream->fragment_repetition_index == 0) {
+    stream->current_fragment = g_list_previous (stream->current_fragment);
+    if (stream->current_fragment == NULL)
+      return GST_FLOW_EOS;
+    fragment = stream->current_fragment->data;
+    stream->fragment_repetition_index = fragment->repetitions - 1;
+  } else {
+    stream->fragment_repetition_index--;
+  }
+  return GST_FLOW_OK;
+}
+
+const gchar *
+gst_mss_stream_type_name (GstMssStreamType streamtype)
+{
+  switch (streamtype) {
+    case MSS_STREAM_TYPE_VIDEO:
+      return "video";
+    case MSS_STREAM_TYPE_AUDIO:
+      return "audio";
+    case MSS_STREAM_TYPE_UNKNOWN:
+    default:
+      return "unknown";
+  }
+}
+
+/*
+ * Seeks all streams to the fragment that contains the set time
+ *
+ * @forward: if this is forward playback
+ * @time: time in nanoseconds
+ */
+void
+gst_mss_manifest_seek (GstMssManifest * manifest, gboolean forward, gint64 time)
+{
+  GSList *iter;
+
+  for (iter = manifest->streams; iter; iter = g_slist_next (iter)) {
+    GstMssStream *stream = iter->data;
+
+    gst_mss_manifest_live_adapter_clear (stream);
+    gst_mss_stream_seek (stream, forward, 0, time, NULL);
+  }
+}
+
+#define SNAP_AFTER(forward,flags) \
+    ((forward && (flags & GST_SEEK_FLAG_SNAP_AFTER)) || \
+    (!forward && (flags & GST_SEEK_FLAG_SNAP_BEFORE)))
+
+/*
+ * Seeks this stream to the fragment that contains the sample at time
+ *
+ * @time: time in nanoseconds
+ */
+void
+gst_mss_stream_seek (GstMssStream * stream, gboolean forward,
+    GstSeekFlags flags, gint64 time, gint64 * final_time)
+{
+  GList *iter;
+  guint64 timescale;
+  GstMssStreamFragment *fragment = NULL;
+
+  timescale = gst_mss_stream_get_timescale (stream);
+  time = gst_util_uint64_scale_round (time, timescale, GST_SECOND);
+
+  GST_DEBUG ("Stream %s seeking to %" G_GUINT64_FORMAT, stream->url, time);
+  for (iter = stream->fragments; iter; iter = g_list_next (iter)) {
+    fragment = iter->data;
+    if (fragment->time + fragment->repetitions * fragment->duration > time) {
+      stream->current_fragment = iter;
+      stream->fragment_repetition_index =
+          (time - fragment->time) / fragment->duration;
+      if (((time - fragment->time) % fragment->duration) == 0) {
+
+        /* for reverse playback, start from the previous fragment when we are
+         * exactly at a limit */
+        if (!forward)
+          stream->fragment_repetition_index--;
+      } else if (SNAP_AFTER (forward, flags))
+        stream->fragment_repetition_index++;
+
+      if (stream->fragment_repetition_index == fragment->repetitions) {
+        /* move to the next one */
+        stream->fragment_repetition_index = 0;
+        stream->current_fragment = g_list_next (iter);
+        fragment =
+            stream->current_fragment ? stream->current_fragment->data : NULL;
+
+      } else if (stream->fragment_repetition_index == -1) {
+        if (g_list_previous (iter)) {
+          stream->current_fragment = g_list_previous (iter);
+          fragment = stream->current_fragment->data;
+          g_assert (fragment);
+          stream->fragment_repetition_index = fragment->repetitions - 1;
+        } else {
+          stream->fragment_repetition_index = 0;
+        }
+      }
+
+      break;
+    }
+
+  }
+
+  GST_DEBUG ("Stream %s seeked to fragment time %" G_GUINT64_FORMAT
+      " repetition %u", stream->url,
+      fragment ? fragment->time : GST_CLOCK_TIME_NONE,
+      stream->fragment_repetition_index);
+  if (final_time) {
+    if (fragment) {
+      *final_time = gst_util_uint64_scale_round (fragment->time +
+          stream->fragment_repetition_index * fragment->duration,
+          GST_SECOND, timescale);
+    } else {
+      GstMssStreamFragment *last_fragment = g_list_last (iter)->data;
+      *final_time = gst_util_uint64_scale_round (last_fragment->time +
+          last_fragment->repetitions * last_fragment->duration,
+          GST_SECOND, timescale);
+    }
+  }
+}
+
+guint64
+gst_mss_manifest_get_current_bitrate (GstMssManifest * manifest)
+{
+  guint64 bitrate = 0;
+  GSList *iter;
+
+  for (iter = gst_mss_manifest_get_streams (manifest); iter;
+      iter = g_slist_next (iter)) {
+    GstMssStream *stream = iter->data;
+    if (stream->active && stream->current_quality) {
+      GstMssStreamQuality *q = stream->current_quality->data;
+
+      bitrate += q->bitrate;
+    }
+  }
+
+  return bitrate;
+}
+
+gboolean
+gst_mss_manifest_is_live (GstMssManifest * manifest)
+{
+  return manifest->is_live;
+}
+
+static void
+gst_mss_stream_reload_fragments (GstMssStream * stream, xmlNodePtr streamIndex)
+{
+  xmlNodePtr iter;
+  gint64 current_gst_time;
+  GstMssFragmentListBuilder builder;
+
+  current_gst_time = gst_mss_stream_get_fragment_gst_timestamp (stream);
+
+  gst_mss_fragment_list_builder_init (&builder);
+
+  GST_DEBUG ("Current position: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (current_gst_time));
+
+  for (iter = streamIndex->children; iter; iter = iter->next) {
+    if (node_has_type (iter, MSS_NODE_STREAM_FRAGMENT)) {
+      gst_mss_fragment_list_builder_add (&builder, iter);
+    } else {
+      /* TODO gst log this */
+    }
+  }
+
+  /* store the new fragments list */
+  if (builder.fragments) {
+    g_list_free_full (stream->fragments, g_free);
+    stream->fragments = g_list_reverse (builder.fragments);
+    stream->current_fragment = stream->fragments;
+    /* TODO Verify how repositioning here works for reverse
+     * playback - it might start from the wrong fragment */
+    gst_mss_stream_seek (stream, TRUE, 0, current_gst_time, NULL);
+  }
+}
+
+static void
+gst_mss_manifest_reload_fragments_from_xml (GstMssManifest * manifest,
+    xmlNodePtr root)
+{
+  xmlNodePtr nodeiter;
+  GSList *streams = manifest->streams;
+
+  /* we assume the server is providing the streams in the same order in
+   * every manifest */
+  for (nodeiter = root->children; nodeiter && streams;
+      nodeiter = nodeiter->next) {
+    if (nodeiter->type == XML_ELEMENT_NODE
+        && (strcmp ((const char *) nodeiter->name, "StreamIndex") == 0)) {
+      gst_mss_stream_reload_fragments (streams->data, nodeiter);
+      streams = g_slist_next (streams);
+    }
+  }
+}
+
+void
+gst_mss_manifest_reload_fragments (GstMssManifest * manifest, GstBuffer * data)
+{
+  xmlDocPtr xml;
+  xmlNodePtr root;
+  GstMapInfo info;
+
+  gst_buffer_map (data, &info, GST_MAP_READ);
+
+  xml = xmlReadMemory ((const gchar *) info.data,
+      info.size, "manifest", NULL, 0);
+  root = xmlDocGetRootElement (xml);
+
+  gst_mss_manifest_reload_fragments_from_xml (manifest, root);
+
+  xmlFreeDoc (xml);
+
+  gst_buffer_unmap (data, &info);
+}
+
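+/* Picks the quality level for the stream: walk down the quality list while
+ * the current level is above @bitrate, then walk back up as long as the next
+ * level still stays below @bitrate (the list is assumed to be ordered by
+ * ascending bitrate). Returns TRUE if the selection changed. */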
+gboolean
+gst_mss_stream_select_bitrate (GstMssStream * stream, guint64 bitrate)
+{
+  GList *iter = stream->current_quality;
+  GList *next;
+  GstMssStreamQuality *q = iter->data;
+
+  while (q->bitrate > bitrate) {
+    next = g_list_previous (iter);
+    if (next) {
+      iter = next;
+      q = iter->data;
+    } else {
+      break;
+    }
+  }
+
+  while (q->bitrate < bitrate) {
+    GstMssStreamQuality *next_q;
+    next = g_list_next (iter);
+    if (next) {
+      next_q = next->data;
+      if (next_q->bitrate < bitrate) {
+        iter = next;
+        q = iter->data;
+      } else {
+        break;
+      }
+    } else {
+      break;
+    }
+  }
+
+  if (iter == stream->current_quality)
+    return FALSE;
+  stream->current_quality = iter;
+  return TRUE;
+}
+
+guint64
+gst_mss_stream_get_current_bitrate (GstMssStream * stream)
+{
+  GstMssStreamQuality *q;
+  if (stream->current_quality == NULL)
+    return 0;
+
+  q = stream->current_quality->data;
+  return q->bitrate;
+}
+
+/**
+ * gst_mss_manifest_change_bitrate:
+ * @manifest: the manifest
+ * @bitrate: the maximum bitrate to use (bps)
+ *
+ * Iterates over the active streams and moves each of them to the highest
+ * quality whose bitrate does not exceed @bitrate.
+ *
+ * Returns: %TRUE if any stream changed its bitrate
+ */
+gboolean
+gst_mss_manifest_change_bitrate (GstMssManifest * manifest, guint64 bitrate)
+{
+  gboolean ret = FALSE;
+  GSList *iter;
+
+  /* TODO This algorithm currently applies the same bitrate limit to every
+   * stream; it should actually compare the sum of all the streams' bitrates
+   * against the target value */
+
+  if (bitrate == 0) {
+    /* use maximum */
+    bitrate = G_MAXUINT64;
+  }
+
+  for (iter = gst_mss_manifest_get_streams (manifest); iter;
+      iter = g_slist_next (iter)) {
+    GstMssStream *stream = iter->data;
+    if (stream->active) {
+      ret = ret | gst_mss_stream_select_bitrate (stream, bitrate);
+    }
+  }
+
+  return ret;
+}
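+
+/* Usage sketch (illustrative, not part of this patch): an element reacting to
+ * a measured download rate could simply call
+ *
+ *   if (gst_mss_manifest_change_bitrate (manifest, measured_bitrate))
+ *     GST_DEBUG ("now using %" G_GUINT64_FORMAT " bps",
+ *         gst_mss_manifest_get_current_bitrate (manifest));
+ *
+ * where measured_bitrate is whatever estimate the download logic provides. */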
+
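+/* Helper: converts a hexadecimal string (for example a CodecPrivateData value
+ * taken from the manifest) into a GstBuffer of raw bytes. Returns NULL if the
+ * string has an odd length or contains non-hex characters. */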
+static GstBuffer *
+gst_buffer_from_hex_string (const gchar * s)
+{
+  GstBuffer *buffer = NULL;
+  gint len;
+  gchar ts[3];
+  guint8 *data;
+  gint i;
+  GstMapInfo info;
+
+  len = strlen (s);
+  if (len & 1)
+    return NULL;
+
+  buffer = gst_buffer_new_and_alloc (len / 2);
+  gst_buffer_map (buffer, &info, GST_MAP_WRITE);
+  data = info.data;
+  for (i = 0; i < len / 2; i++) {
+    if (!isxdigit ((int) s[i * 2]) || !isxdigit ((int) s[i * 2 + 1])) {
+      gst_buffer_unref (buffer);
+      return NULL;
+    }
+
+    ts[0] = s[i * 2 + 0];
+    ts[1] = s[i * 2 + 1];
+    ts[2] = 0;
+
+    data[i] = (guint8) strtoul (ts, NULL, 16);
+  }
+
+  gst_buffer_unmap (buffer, &info);
+  return buffer;
+}
+
+const gchar *
+gst_mss_stream_get_lang (GstMssStream * stream)
+{
+  return stream->lang;
+}
+
+static GstClockTime
+gst_mss_manifest_get_dvr_window_length_clock_time (GstMssManifest * manifest)
+{
+  gint64 timescale;
+
+  /* the entire file is always available for non-live streams */
+  if (manifest->dvr_window == 0)
+    return GST_CLOCK_TIME_NONE;
+
+  timescale = gst_mss_manifest_get_timescale (manifest);
+  return (GstClockTime) gst_util_uint64_scale_round (manifest->dvr_window,
+      GST_SECOND, timescale);
+}
+
+static gboolean
+gst_mss_stream_get_live_seek_range (GstMssStream * stream, gint64 * start,
+    gint64 * stop)
+{
+  GList *l;
+  GstMssStreamFragment *fragment;
+  guint64 timescale = gst_mss_stream_get_timescale (stream);
+
+  g_return_val_if_fail (stream->active, FALSE);
+
+  /* XXX: assumes all the data in the stream is still available */
+  l = g_list_first (stream->fragments);
+  fragment = (GstMssStreamFragment *) l->data;
+  *start = gst_util_uint64_scale_round (fragment->time, GST_SECOND, timescale);
+
+  l = g_list_last (stream->fragments);
+  fragment = (GstMssStreamFragment *) l->data;
+  *stop = gst_util_uint64_scale_round (fragment->time + fragment->duration *
+      fragment->repetitions, GST_SECOND, timescale);
+
+  return TRUE;
+}
+
+gboolean
+gst_mss_manifest_get_live_seek_range (GstMssManifest * manifest, gint64 * start,
+    gint64 * stop)
+{
+  GSList *iter;
+  gboolean ret = FALSE;
+
+  for (iter = manifest->streams; iter; iter = g_slist_next (iter)) {
+    GstMssStream *stream = iter->data;
+
+    if (stream->active) {
+      /* FIXME: bound this correctly for multiple streams */
+      if (!(ret = gst_mss_stream_get_live_seek_range (stream, start, stop)))
+        break;
+    }
+  }
+
+  if (ret && gst_mss_manifest_is_live (manifest)) {
+    GstClockTime dvr_window =
+        gst_mss_manifest_get_dvr_window_length_clock_time (manifest);
+
+    if (GST_CLOCK_TIME_IS_VALID (dvr_window) && *stop - *start > dvr_window) {
+      *start = *stop - dvr_window;
+    }
+  }
+
+  return ret;
+}
+
+void
+gst_mss_manifest_live_adapter_push (GstMssStream * stream, GstBuffer * buffer)
+{
+  gst_adapter_push (stream->live_adapter, buffer);
+}
+
+gsize
+gst_mss_manifest_live_adapter_available (GstMssStream * stream)
+{
+  return gst_adapter_available (stream->live_adapter);
+}
+
+GstBuffer *
+gst_mss_manifest_live_adapter_take_buffer (GstMssStream * stream, gsize nbytes)
+{
+  return gst_adapter_take_buffer (stream->live_adapter, nbytes);
+}
+
+void
+gst_mss_manifest_live_adapter_clear (GstMssStream * stream)
+{
+  if (stream->live_adapter)
+    gst_adapter_clear (stream->live_adapter);
+}
+
+gboolean
+gst_mss_stream_fragment_parsing_needed (GstMssStream * stream)
+{
+  return stream->fragment_parser.status == GST_MSS_FRAGMENT_HEADER_PARSER_INIT;
+}
+
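+/* For live streams, inspects a downloaded fragment's moof/traf/tfrf boxes to
+ * discover fragments the server has announced but that are not yet in our
+ * list, and appends them so playback can continue past the fragments known
+ * from the original manifest. */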
+void
+gst_mss_stream_parse_fragment (GstMssStream * stream, GstBuffer * buffer)
+{
+  const gchar *stream_type_name;
+  guint8 index;
+  GstMoofBox *moof;
+  GstTrafBox *traf;
+
+  if (!stream->has_live_fragments)
+    return;
+
+  if (!gst_mss_fragment_parser_add_buffer (&stream->fragment_parser, buffer))
+    return;
+
+  moof = stream->fragment_parser.moof;
+  traf = &g_array_index (moof->traf, GstTrafBox, 0);
+
+  stream_type_name =
+      gst_mss_stream_type_name (gst_mss_stream_get_type (stream));
+
+  for (index = 0; index < traf->tfrf->entries_count; index++) {
+    GstTfrfBoxEntry *entry =
+        &g_array_index (traf->tfrf->entries, GstTfrfBoxEntry, index);
+    GList *l = g_list_last (stream->fragments);
+    GstMssStreamFragment *last;
+    GstMssStreamFragment *fragment;
+    guint64 parsed_time = entry->time;
+    guint64 parsed_duration = entry->duration;
+
+    if (l == NULL)
+      break;
+
+    last = (GstMssStreamFragment *) l->data;
+
+    /* only add the fragment to the list if its time is beyond the last
+     * fragment already in the list */
+    if (last->time >= entry->time)
+      continue;
+
+    fragment = g_new (GstMssStreamFragment, 1);
+    fragment->number = last->number + 1;
+    fragment->repetitions = 1;
+    fragment->time = parsed_time;
+    fragment->duration = parsed_duration;
+
+    stream->fragments = g_list_append (stream->fragments, fragment);
+    GST_LOG ("Adding fragment number: %u to %s stream, time: %"
+        G_GUINT64_FORMAT ", duration: %" G_GUINT64_FORMAT ", repetitions: %u",
+        fragment->number, stream_type_name, fragment->time,
+        fragment->duration, fragment->repetitions);
+  }
+}
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.h b/subprojects/gst-plugins-good/ext/adaptivedemux2/mss/gstmssmanifest.h
new file mode 100644 (file)
index 0000000..a88f5d7
--- /dev/null
@@ -0,0 +1,85 @@
+/* GStreamer
+ * Copyright (C) 2012 Smart TV Alliance
+ *  Author: Thiago Sousa Santos <thiago.sousa.santos@collabora.com>, Collabora Ltd.
+ *
+ * gstmssmanifest.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MSS_MANIFEST_H__
+#define __GST_MSS_MANIFEST_H__
+
+#include <glib.h>
+#include <gio/gio.h>
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstMssManifest GstMssManifest;
+typedef struct _GstMssStream GstMssStream;
+
+typedef enum _GstMssStreamType {
+  MSS_STREAM_TYPE_UNKNOWN = 0,
+  MSS_STREAM_TYPE_VIDEO = 1,
+  MSS_STREAM_TYPE_AUDIO = 2
+} GstMssStreamType;
+
+GstMssManifest * gst_mss_manifest_new (GstBuffer * data);
+void gst_mss_manifest_free (GstMssManifest * manifest);
+GSList * gst_mss_manifest_get_streams (GstMssManifest * manifest);
+guint64 gst_mss_manifest_get_timescale (GstMssManifest * manifest);
+guint64 gst_mss_manifest_get_duration (GstMssManifest * manifest);
+GstClockTime gst_mss_manifest_get_gst_duration (GstMssManifest * manifest);
+void gst_mss_manifest_seek (GstMssManifest * manifest, gboolean forward, gint64 time);
+gboolean gst_mss_manifest_change_bitrate (GstMssManifest *manifest, guint64 bitrate);
+guint64 gst_mss_manifest_get_current_bitrate (GstMssManifest * manifest);
+gboolean gst_mss_manifest_is_live (GstMssManifest * manifest);
+gint64 gst_mss_manifest_get_dvr_window_length (GstMssManifest * manifest);
+gint gst_mss_manifest_get_look_ahead_fragments_count (GstMssManifest * manifest);
+void gst_mss_manifest_reload_fragments (GstMssManifest * manifest, GstBuffer * data);
+GstClockTime gst_mss_manifest_get_min_fragment_duration (GstMssManifest * manifest);
+const gchar * gst_mss_manifest_get_protection_system_id (GstMssManifest * manifest);
+const gchar * gst_mss_manifest_get_protection_data (GstMssManifest * manifest);
+gboolean gst_mss_manifest_get_live_seek_range (GstMssManifest * manifest, gint64 * start, gint64 * stop);
+
+GstMssStreamType gst_mss_stream_get_type (GstMssStream *stream);
+GstCaps * gst_mss_stream_get_caps (GstMssStream * stream);
+gboolean gst_mss_stream_select_bitrate (GstMssStream * stream, guint64 bitrate);
+guint64 gst_mss_stream_get_current_bitrate (GstMssStream * stream);
+void gst_mss_stream_set_active (GstMssStream * stream, gboolean active);
+guint64 gst_mss_stream_get_timescale (GstMssStream * stream);
+GstFlowReturn gst_mss_stream_get_fragment_url (GstMssStream * stream, gchar ** url);
+GstClockTime gst_mss_stream_get_fragment_gst_timestamp (GstMssStream * stream);
+GstClockTime gst_mss_stream_get_fragment_gst_duration (GstMssStream * stream);
+gboolean gst_mss_stream_has_next_fragment (GstMssStream * stream);
+GstFlowReturn gst_mss_stream_advance_fragment (GstMssStream * stream);
+GstFlowReturn gst_mss_stream_regress_fragment (GstMssStream * stream);
+void gst_mss_stream_seek (GstMssStream * stream, gboolean forward, GstSeekFlags flags, gint64 time, gint64 * final_time);
+const gchar * gst_mss_stream_get_lang (GstMssStream * stream);
+
+const gchar * gst_mss_stream_type_name (GstMssStreamType streamtype);
+
+void gst_mss_manifest_live_adapter_push (GstMssStream * stream, GstBuffer * buffer);
+gsize gst_mss_manifest_live_adapter_available (GstMssStream * stream);
+GstBuffer * gst_mss_manifest_live_adapter_take_buffer (GstMssStream * stream, gsize nbytes);
+void gst_mss_manifest_live_adapter_clear (GstMssStream * stream);
+gboolean gst_mss_stream_fragment_parsing_needed (GstMssStream * stream);
+void gst_mss_stream_parse_fragment (GstMssStream * stream, GstBuffer * buffer);
+
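+/* Rough usage sketch (illustrative only, identifier names are placeholders):
+ *
+ *   GstMssManifest *manifest = gst_mss_manifest_new (manifest_buffer);
+ *   GSList *streams = gst_mss_manifest_get_streams (manifest);
+ *   for (GSList *l = streams; l; l = l->next)
+ *     gst_mss_stream_set_active (l->data, TRUE);
+ *   // per active stream: gst_mss_stream_get_fragment_url (), download the
+ *   // fragment, then gst_mss_stream_advance_fragment () while it succeeds
+ *   gst_mss_manifest_free (manifest);
+ */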
+G_END_DECLS
+#endif /* __GST_MSS_MANIFEST_H__ */
diff --git a/subprojects/gst-plugins-good/ext/adaptivedemux2/plugin.c b/subprojects/gst-plugins-good/ext/adaptivedemux2/plugin.c
new file mode 100644 (file)
index 0000000..ade4053
--- /dev/null
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) 2021-2022 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#  include <config.h>
+#endif
+
+#include "dash/gstdashdemux.h"
+#include "hls/gsthlsdemux.h"
+#include "mss/gstmssdemux.h"
+#include "../soup/gstsouploader.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret = TRUE;
+
+#ifndef STATIC_SOUP
+  if (!gst_soup_load_library ()) {
+    GST_WARNING ("Failed to load libsoup library");
+    return TRUE;
+  }
+#endif
+
+  ret |= GST_ELEMENT_REGISTER (hlsdemux2, plugin);
+  ret |= GST_ELEMENT_REGISTER (dashdemux2, plugin);
+  ret |= GST_ELEMENT_REGISTER (mssdemux2, plugin);
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    adaptivedemux2,
+    "Adaptive Streaming 2 plugin", plugin_init, VERSION,
+    GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
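+
+/* A quick way to verify the plugin registered (illustrative): running
+ * "gst-inspect-1.0 adaptivedemux2" should list the hlsdemux2, dashdemux2 and
+ * mssdemux2 elements. */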
index 6f5594e..9bf2240 100644 (file)
@@ -1,4 +1,5 @@
 subdir('aalib')
+subdir('adaptivedemux2')
 subdir('cairo')
 subdir('flac')
 subdir('gdk_pixbuf')
index dcd33a6..ff123f1 100644 (file)
 #endif
 #endif /* G_OS_WIN32 */
 
+#ifdef BUILDING_ADAPTIVEDEMUX2
+GST_DEBUG_CATEGORY (gst_adaptivedemux_soup_debug);
+#define GST_CAT_DEFAULT gst_adaptivedemux_soup_debug
+#else
 GST_DEBUG_CATEGORY_EXTERN (gst_soup_debug);
 #define GST_CAT_DEFAULT gst_soup_debug
+#endif
+
 
 #ifndef STATIC_SOUP
 
@@ -106,6 +112,11 @@ typedef struct _GstSoupVTable
   goffset (*_soup_message_headers_get_content_length) (SoupMessageHeaders * hdrs);
   const char *(*_soup_message_headers_get_content_type) (SoupMessageHeaders * hdrs,
     GHashTable ** value);
+#ifdef BUILDING_ADAPTIVEDEMUX2
+  gboolean (*_soup_message_headers_get_content_range) (SoupMessageHeaders *hdrs, goffset *start,
+    goffset *end, goffset *total_length);
+  void (*_soup_message_headers_set_range) (SoupMessageHeaders *hdrs, goffset start, goffset end);
+#endif
   SoupEncoding (*_soup_message_headers_get_encoding) (SoupMessageHeaders * hdrs);
   const char *(*_soup_message_headers_get_one) (SoupMessageHeaders * hdrs,
     const char * name);
@@ -146,6 +157,11 @@ gst_soup_load_library (void)
 
   g_assert (g_module_supported ());
 
+#ifdef BUILDING_ADAPTIVEDEMUX2
+  GST_DEBUG_CATEGORY_INIT (gst_adaptivedemux_soup_debug, "adaptivedemux2-soup",
+      0, "adaptivedemux2-soup");
+#endif
+
 #ifdef HAVE_RTLD_NOLOAD
   {
     gpointer handle = NULL;
@@ -262,6 +278,10 @@ gst_soup_load_library (void)
       LOAD_SYMBOL (soup_message_headers_foreach);
       LOAD_SYMBOL (soup_message_headers_get_content_length);
       LOAD_SYMBOL (soup_message_headers_get_content_type);
+#ifdef BUILDING_ADAPTIVEDEMUX2
+      LOAD_SYMBOL (soup_message_headers_get_content_range);
+      LOAD_SYMBOL (soup_message_headers_set_range);
+#endif
       LOAD_SYMBOL (soup_message_headers_get_encoding);
       LOAD_SYMBOL (soup_message_headers_get_one);
       LOAD_SYMBOL (soup_message_headers_remove);
@@ -792,6 +812,34 @@ _soup_message_headers_get_content_type (SoupMessageHeaders * hdrs,
 #endif
 }
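+/* Wrappers for the range-related SoupMessageHeaders calls that adaptivedemux2
+ * needs; as with the other wrappers in this file, they call libsoup directly
+ * when STATIC_SOUP is defined and go through the dynamically loaded vtable
+ * otherwise. */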
 
+#ifdef BUILDING_ADAPTIVEDEMUX2
+gboolean
+_soup_message_headers_get_content_range (SoupMessageHeaders * hdrs,
+    goffset * start, goffset * end, goffset * total_length)
+{
+#ifdef STATIC_SOUP
+  return soup_message_headers_get_content_range (hdrs, start, end,
+      total_length);
+#else
+  g_assert (gst_soup_vtable._soup_message_headers_get_content_range != NULL);
+  return gst_soup_vtable._soup_message_headers_get_content_range (hdrs, start,
+      end, total_length);
+#endif
+}
+
+void
+_soup_message_headers_set_range (SoupMessageHeaders * hdrs, goffset start,
+    goffset end)
+{
+#ifdef STATIC_SOUP
+  soup_message_headers_set_range (hdrs, start, end);
+#else
+  g_assert (gst_soup_vtable._soup_message_headers_set_range != NULL);
+  gst_soup_vtable._soup_message_headers_set_range (hdrs, start, end);
+#endif
+}
+#endif
+
 void
 _soup_auth_authenticate (SoupAuth * auth, const char *username,
     const char *password)
index a88ebe6..cbe8e41 100644 (file)
@@ -96,6 +96,14 @@ void _soup_message_disable_feature (SoupMessage *msg, GType feature_type);
 const char *_soup_message_headers_get_content_type (SoupMessageHeaders *hdrs,
                                                     GHashTable **params);
 
+#ifdef BUILDING_ADAPTIVEDEMUX2
+gboolean _soup_message_headers_get_content_range (SoupMessageHeaders *hdrs,
+                                                   goffset *start, goffset *end,
+                                                   goffset *total_length);
+
+void _soup_message_headers_set_range (SoupMessageHeaders *hdrs, goffset start, goffset end);
+#endif
+
 void _soup_auth_authenticate (SoupAuth *auth, const char *username,
                               const char *password);
 
index 3dafe1f..b3581bd 100644 (file)
@@ -44,6 +44,7 @@ option('wavparse', type : 'feature', value : 'auto')
 option('y4m', type : 'feature', value : 'auto')
 
 # Feature options for plugins with external deps
+option('adaptivedemux2', type : 'feature', value : 'auto', description : '2nd generation adaptive demuxer plugin')
 option('aalib', type : 'feature', value : 'auto', description : 'aalib text console video sink plugin')
 option('bz2', type : 'feature', value : 'auto', description : 'libbz2 support in the matroska plugin')
 option('cairo', type : 'feature', value : 'auto', description : 'Cairo overlay plugin')
@@ -74,6 +75,10 @@ option('vpx', type : 'feature', value : 'auto', description : 'VP8 and VP9 video
 option('waveform', type : 'feature', value : 'auto', description : 'Windows waveform audio sink plugin')
 option('wavpack', type : 'feature', value : 'auto', description : 'Wavpack audio codec plugin')
 
+# HLS plugin options
+option('hls-crypto', type : 'combo', value : 'auto', choices : ['auto', 'nettle', 'libgcrypt', 'openssl'],
+       description: 'Crypto library to use for HLS plugin')
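+# (illustrative example: meson configure -Dhls-crypto=openssl, or
+#  -Dgst-plugins-good:hls-crypto=openssl when configuring the mono-repo build)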
+
 # rpicamsrc plugin options
 option('rpicamsrc', type : 'feature', value : 'auto', description : 'Raspberry Pi camera module plugin')
 option('rpi-header-dir', type : 'string', value : '/opt/vc/include', description : 'Directory where VideoCore/MMAL headers and bcm_host.h can be found')
diff --git a/subprojects/gst-plugins-good/tests/check/elements/dash_mpd.c b/subprojects/gst-plugins-good/tests/check/elements/dash_mpd.c
new file mode 100644 (file)
index 0000000..5444f85
--- /dev/null
@@ -0,0 +1,6434 @@
+/* GStreamer unit test for MPEG-DASH
+ *
+ * Copyright (c) <2015> YouView TV Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "../../ext/adaptivedemux2/dash/gstmpdparser.c"
+#include "../../ext/adaptivedemux2/dash/gstxmlhelper.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdhelper.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdrepresentationbasenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdmultsegmentbasenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdrootnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdbaseurlnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdutctimingnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdmetricsnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdmetricsrangenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsegmenttimelinenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsegmenttemplatenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsegmenturlnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsegmentlistnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsegmentbasenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdperiodnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsubrepresentationnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdrepresentationnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdcontentcomponentnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdadaptationsetnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdsubsetnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdprograminformationnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdlocationnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdreportingnode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdurltypenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpddescriptortypenode.c"
+#include "../../ext/adaptivedemux2/dash/gstmpdclient.c"
+#undef GST_CAT_DEFAULT
+
+#include <gst/check/gstcheck.h>
+
+GST_DEBUG_CATEGORY (gst_dash_demux2_debug);
+
+/*
+ * The linker used to complain about missing downloadhelper_* symbols.
+ * The tests below don't actually use them, so this stub only exists to
+ * silence those warnings.
+ */
+DownloadRequest *
+downloadhelper_fetch_uri (DownloadHelper * dh, const gchar * uri,
+    const gchar * referer, DownloadFlags flags, GError ** err)
+{
+  g_assert_not_reached ();
+  return NULL;
+}
+
+/*
+ * Compute the number of milliseconds contained in a duration value specified
+ * by year, month, day, hour, minute, second, millisecond.
+ *
+ * This function must use the same conversion algorithm as the one implemented
+ * in gst_xml_helper_get_prop_duration (see gstxmlhelper.c).
+ */
+static guint64
+duration_to_ms (guint year, guint month, guint day, guint hour, guint minute,
+    guint second, guint millisecond)
+{
+  guint64 days = (guint64) year * 365 + (guint64) month * 30 + day;
+  guint64 hours = days * 24 + hour;
+  guint64 minutes = hours * 60 + minute;
+  guint64 seconds = minutes * 60 + second;
+  guint64 ms = seconds * 1000 + millisecond;
+  return ms;
+}
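+/* Worked example of the conversion above:
+ *   duration_to_ms (0, 1, 2, 12, 10, 20, 500)
+ *     = ((((0 * 365 + 1 * 30 + 2) * 24 + 12) * 60 + 10) * 60 + 20) * 1000 + 500
+ *     = 2808620500 ms
+ */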
+
+static GstClockTime
+duration_to_clocktime (guint year, guint month, guint day, guint hour,
+    guint minute, guint second, guint millisecond)
+{
+  return (GST_MSECOND * duration_to_ms (year, month, day, hour, minute, second,
+          millisecond));
+}
+
+/*
+ * Test to ensure a simple mpd file successfully parses.
+ *
+ */
+GST_START_TEST (dash_mpdparser_validsimplempd)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\"> </MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* check that unset elements with default values are properly configured */
+  assert_equals_int (mpdclient->mpd_root_node->type, GST_MPD_FILE_TYPE_STATIC);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing the MPD attributes.
+ *
+ */
+GST_START_TEST (dash_mpdparser_mpd)
+{
+  GstDateTime *availabilityStartTime;
+  GstDateTime *availabilityEndTime;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     schemaLocation=\"TestSchemaLocation\""
+      "     xmlns:xsi=\"TestNamespaceXSI\""
+      "     xmlns:ext=\"TestNamespaceEXT\""
+      "     id=\"testId\""
+      "     type=\"static\""
+      "     availabilityStartTime=\"2015-03-24T1:10:50\""
+      "     availabilityEndTime=\"2015-03-24T1:10:50.123456\""
+      "     mediaPresentationDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     minimumUpdatePeriod=\"P0Y1M2DT12H10M20.5S\""
+      "     minBufferTime=\"P0Y1M2DT12H10M20.5S\""
+      "     timeShiftBufferDepth=\"P0Y1M2DT12H10M20.5S\""
+      "     suggestedPresentationDelay=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSegmentDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSubsegmentDuration=\"P0Y1M2DT12H10M20.5S\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  assert_equals_string (mpdclient->mpd_root_node->default_namespace,
+      "urn:mpeg:dash:schema:mpd:2011");
+  assert_equals_string (mpdclient->mpd_root_node->namespace_xsi,
+      "TestNamespaceXSI");
+  assert_equals_string (mpdclient->mpd_root_node->namespace_ext,
+      "TestNamespaceEXT");
+  assert_equals_string (mpdclient->mpd_root_node->schemaLocation,
+      "TestSchemaLocation");
+  assert_equals_string (mpdclient->mpd_root_node->id, "testId");
+
+  assert_equals_int (mpdclient->mpd_root_node->type, GST_MPD_FILE_TYPE_STATIC);
+
+  availabilityStartTime = mpdclient->mpd_root_node->availabilityStartTime;
+  assert_equals_int (gst_date_time_get_year (availabilityStartTime), 2015);
+  assert_equals_int (gst_date_time_get_month (availabilityStartTime), 3);
+  assert_equals_int (gst_date_time_get_day (availabilityStartTime), 24);
+  assert_equals_int (gst_date_time_get_hour (availabilityStartTime), 1);
+  assert_equals_int (gst_date_time_get_minute (availabilityStartTime), 10);
+  assert_equals_int (gst_date_time_get_second (availabilityStartTime), 50);
+  assert_equals_int (gst_date_time_get_microsecond (availabilityStartTime), 0);
+
+  availabilityEndTime = mpdclient->mpd_root_node->availabilityEndTime;
+  assert_equals_int (gst_date_time_get_year (availabilityEndTime), 2015);
+  assert_equals_int (gst_date_time_get_month (availabilityEndTime), 3);
+  assert_equals_int (gst_date_time_get_day (availabilityEndTime), 24);
+  assert_equals_int (gst_date_time_get_hour (availabilityEndTime), 1);
+  assert_equals_int (gst_date_time_get_minute (availabilityEndTime), 10);
+  assert_equals_int (gst_date_time_get_second (availabilityEndTime), 50);
+  assert_equals_int (gst_date_time_get_microsecond (availabilityEndTime),
+      123456);
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->mediaPresentationDuration,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->minimumUpdatePeriod,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->minBufferTime,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->timeShiftBufferDepth,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->suggestedPresentationDelay,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->maxSegmentDuration,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->maxSubsegmentDuration,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 500));
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing the ProgramInformation attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_programInformation)
+{
+  GstMPDProgramInformationNode *program;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <ProgramInformation lang=\"en\""
+      "                      moreInformationURL=\"TestMoreInformationUrl\">"
+      "    <Title>TestTitle</Title>"
+      "    <Source>TestSource</Source>"
+      "    <Copyright>TestCopyright</Copyright>"
+      "  </ProgramInformation> </MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  program =
+      (GstMPDProgramInformationNode *) mpdclient->mpd_root_node->ProgramInfos->
+      data;
+  assert_equals_string (program->lang, "en");
+  assert_equals_string (program->moreInformationURL, "TestMoreInformationUrl");
+  assert_equals_string (program->Title, "TestTitle");
+  assert_equals_string (program->Source, "TestSource");
+  assert_equals_string (program->Copyright, "TestCopyright");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing the BaseURL attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_baseURL)
+{
+  GstMPDBaseURLNode *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL serviceLocation=\"TestServiceLocation\""
+      "     byteRange=\"TestByteRange\">TestBaseURL</BaseURL></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  baseURL = (GstMPDBaseURLNode *) mpdclient->mpd_root_node->BaseURLs->data;
+  assert_equals_string (baseURL->baseURL, "TestBaseURL");
+  assert_equals_string (baseURL->serviceLocation, "TestServiceLocation");
+  assert_equals_string (baseURL->byteRange, "TestByteRange");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing the Location attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_location)
+{
+  GstMPDLocationNode *location;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Location>TestLocation</Location></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  location = (GstMPDLocationNode *) mpdclient->mpd_root_node->Locations->data;
+  assert_equals_string (location->location, "TestLocation");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Metrics attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_metrics)
+{
+  GstMPDMetricsNode *metricsNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Metrics metrics=\"TestMetric\"></Metrics></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  metricsNode = (GstMPDMetricsNode *) mpdclient->mpd_root_node->Metrics->data;
+  assert_equals_string (metricsNode->metrics, "TestMetric");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Metrics Range attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_metrics_range)
+{
+  GstMPDMetricsNode *metricsNode;
+  GstMPDMetricsRangeNode *metricsRangeNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Metrics>"
+      "    <Range starttime=\"P0Y1M2DT12H10M20.5S\""
+      "           duration=\"P0Y1M2DT12H10M20.1234567S\">"
+      "    </Range></Metrics></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  metricsNode = (GstMPDMetricsNode *) mpdclient->mpd_root_node->Metrics->data;
+  assert_equals_pointer (metricsNode->metrics, NULL);
+  metricsRangeNode =
+      (GstMPDMetricsRangeNode *) metricsNode->MetricsRanges->data;
+  assert_equals_uint64 (metricsRangeNode->starttime, duration_to_ms (0, 1, 2,
+          12, 10, 20, 500));
+  assert_equals_uint64 (metricsRangeNode->duration, duration_to_ms (0, 1, 2, 12,
+          10, 20, 123));
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Metrics Reporting attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_metrics_reporting)
+{
+  GstMPDMetricsNode *metricsNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Metrics><Reporting></Reporting></Metrics></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  metricsNode = (GstMPDMetricsNode *) mpdclient->mpd_root_node->Metrics->data;
+  assert_equals_pointer (metricsNode->metrics, NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period)
+{
+  GstMPDPeriodNode *periodNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"TestId\""
+      "          start=\"P0Y1M2DT12H10M20.1234567S\""
+      "          duration=\"P0Y1M2DT12H10M20.7654321S\""
+      "          bitstreamSwitching=\"true\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  assert_equals_string (periodNode->id, "TestId");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 123));
+  assert_equals_uint64 (periodNode->duration,
+      duration_to_ms (0, 1, 2, 12, 10, 20, 765));
+  assert_equals_int (periodNode->bitstreamSwitching, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period baseURL attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_baseURL)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDBaseURLNode *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <BaseURL serviceLocation=\"TestServiceLocation\""
+      "             byteRange=\"TestByteRange\">TestBaseURL</BaseURL>"
+      "  </Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  baseURL = (GstMPDBaseURLNode *) periodNode->BaseURLs->data;
+  assert_equals_string (baseURL->baseURL, "TestBaseURL");
+  assert_equals_string (baseURL->serviceLocation, "TestServiceLocation");
+  assert_equals_string (baseURL->byteRange, "TestByteRange");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentBase attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentBase timescale=\"123456\""
+      "                 presentationTimeOffset=\"123456789\""
+      "                 indexRange=\"100-200\""
+      "                 indexRangeExact=\"true\">"
+      "    </SegmentBase></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentBase = periodNode->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 123456);
+  assert_equals_uint64 (segmentBase->presentationTimeOffset, 123456789);
+  assert_equals_uint64 (segmentBase->indexRange->first_byte_pos, 100);
+  assert_equals_uint64 (segmentBase->indexRange->last_byte_pos, 200);
+  assert_equals_int (segmentBase->indexRangeExact, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentBase Initialization attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentBase_initialization)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDURLTypeNode *initialization;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentBase>"
+      "      <Initialisation sourceURL=\"TestSourceURL\""
+      "                      range=\"100-200\">"
+      "      </Initialisation></SegmentBase></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentBase = periodNode->SegmentBase;
+  initialization = segmentBase->Initialization;
+  assert_equals_string (initialization->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (initialization->range->first_byte_pos, 100);
+  assert_equals_uint64 (initialization->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentBase RepresentationIndex attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentBase_representationIndex)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDURLTypeNode *representationIndex;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentBase>"
+      "      <RepresentationIndex sourceURL=\"TestSourceURL\""
+      "                           range=\"100-200\">"
+      "      </RepresentationIndex></SegmentBase></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentBase = periodNode->SegmentBase;
+  representationIndex = segmentBase->RepresentationIndex;
+  assert_equals_string (representationIndex->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (representationIndex->range->first_byte_pos, 100);
+  assert_equals_uint64 (representationIndex->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentList)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period><SegmentList duration=\"1\"></SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+  fail_if (segmentList == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList MultipleSegmentBaseType attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentList_multipleSegmentBaseType)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList duration=\"10\""
+      "                 startNumber=\"11\">"
+      "    </SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE (segmentList)->duration,
+      10);
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (segmentList)->startNumber, 11);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList MultipleSegmentBaseType SegmentBaseType
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentBaseType)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList timescale=\"10\""
+      "                 duration=\"1\""
+      "                 presentationTimeOffset=\"11\""
+      "                 indexRange=\"20-21\""
+      "                 indexRangeExact=\"false\">"
+      "    </SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+  segmentBase = GST_MPD_MULT_SEGMENT_BASE_NODE (segmentList)->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 10);
+  assert_equals_uint64 (segmentBase->presentationTimeOffset, 11);
+  assert_equals_uint64 (segmentBase->indexRange->first_byte_pos, 20);
+  assert_equals_uint64 (segmentBase->indexRange->last_byte_pos, 21);
+  assert_equals_int (segmentBase->indexRangeExact, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList MultipleSegmentBaseType SegmentTimeline
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentTimeline)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  GstMPDSegmentTimelineNode *segmentTimeline;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList>"
+      "      <SegmentTimeline>"
+      "      </SegmentTimeline></SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+  segmentTimeline =
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentList)->SegmentTimeline;
+  fail_if (segmentTimeline == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList MultipleSegmentBaseType SegmentTimeline S
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentTimeline_s)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  GstMPDSegmentTimelineNode *segmentTimeline;
+  GstMPDSNode *sNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList>"
+      "      <SegmentTimeline>"
+      "        <S t=\"1\" d=\"2\" r=\"3\">"
+      "        </S></SegmentTimeline></SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+  segmentTimeline =
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentList)->SegmentTimeline;
+  sNode = (GstMPDSNode *) g_queue_peek_head (&segmentTimeline->S);
+  assert_equals_uint64 (sNode->t, 1);
+  assert_equals_uint64 (sNode->d, 2);
+  assert_equals_uint64 (sNode->r, 3);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList MultipleSegmentBaseType BitstreamSwitching
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentList_multipleSegmentBaseType_bitstreamSwitching)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  GstMPDURLTypeNode *bitstreamSwitching;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList duration=\"0\">"
+      "      <BitstreamSwitching sourceURL=\"TestSourceURL\""
+      "                          range=\"100-200\">"
+      "      </BitstreamSwitching></SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+
+  bitstreamSwitching =
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentList)->BitstreamSwitching;
+  assert_equals_string (bitstreamSwitching->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (bitstreamSwitching->range->first_byte_pos, 100);
+  assert_equals_uint64 (bitstreamSwitching->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentList SegmentURL attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentList_segmentURL)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentListNode *segmentList;
+  GstMPDSegmentURLNode *segmentURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentList duration=\"1\">"
+      "      <SegmentURL media=\"TestMedia\""
+      "                  mediaRange=\"100-200\""
+      "                  index=\"TestIndex\""
+      "                  indexRange=\"300-400\">"
+      "      </SegmentURL></SegmentList></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentList = periodNode->SegmentList;
+  segmentURL = (GstMPDSegmentURLNode *) segmentList->SegmentURL->data;
+  assert_equals_string (segmentURL->media, "TestMedia");
+  assert_equals_uint64 (segmentURL->mediaRange->first_byte_pos, 100);
+  assert_equals_uint64 (segmentURL->mediaRange->last_byte_pos, 200);
+  assert_equals_string (segmentURL->index, "TestIndex");
+  assert_equals_uint64 (segmentURL->indexRange->first_byte_pos, 300);
+  assert_equals_uint64 (segmentURL->indexRange->last_byte_pos, 400);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentTemplate)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate media=\"TestMedia\""
+      "                     duration=\"0\""
+      "                     index=\"TestIndex\""
+      "                     initialization=\"TestInitialization\""
+      "                     bitstreamSwitching=\"TestBitstreamSwitching\">"
+      "    </SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+  assert_equals_string (segmentTemplate->media, "TestMedia");
+  assert_equals_string (segmentTemplate->index, "TestIndex");
+  assert_equals_string (segmentTemplate->initialization, "TestInitialization");
+  assert_equals_string (segmentTemplate->bitstreamSwitching,
+      "TestBitstreamSwitching");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate attributes where a
+ * presentationTimeOffset attribute has been specified
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentTemplateWithPresentationTimeOffset)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period start=\"PT1M\" duration=\"PT40S\">"
+      "    <AdaptationSet"
+      "      bitstreamSwitching=\"false\""
+      "      mimeType=\"video/mp4\""
+      "      contentType=\"video\">"
+      "      <SegmentTemplate media=\"$RepresentationID$/TestMedia-$Time$.mp4\""
+      "                     index=\"$RepresentationID$/TestIndex.mp4\""
+      "                     timescale=\"100\""
+      "                     presentationTimeOffset=\"6000\""
+      "                     initialization=\"$RepresentationID$/TestInitialization\""
+      "                     bitstreamSwitching=\"true\">"
+      "        <SegmentTimeline>"
+      "          <S d=\"400\" r=\"9\" t=\"100\"/>"
+      "        </SegmentTimeline></SegmentTemplate>"
+      "      <Representation bandwidth=\"95866\" frameRate=\"90000/3600\""
+      "        id=\"vrep\" /></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstMPDClient2 *mpdclient;
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+
+  mpdclient = gst_mpd_client2_new ();
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      0);
+  fail_if (periodNode == NULL);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  segmentTemplate = adapt_set->SegmentTemplate;
+  fail_if (segmentTemplate == NULL);
+  assert_equals_string (segmentTemplate->media,
+      "$RepresentationID$/TestMedia-$Time$.mp4");
+  assert_equals_string (segmentTemplate->index,
+      "$RepresentationID$/TestIndex.mp4");
+  assert_equals_string (segmentTemplate->initialization,
+      "$RepresentationID$/TestInitialization");
+  assert_equals_string (segmentTemplate->bitstreamSwitching, "true");
+
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 4, 0);
+  /* start = Period@start + S@t - presentationTimeOffset */
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 1, 0);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  /* the $Time$ expansion uses the @t value, without including
+     Period@start or presentationTimeOffset */
+  assert_equals_string (fragment.uri, "/vrep/TestMedia-100.mp4");
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate MultipleSegmentBaseType attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate duration=\"10\""
+      "                     startNumber=\"11\">"
+      "    </SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (segmentTemplate)->duration, 10);
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (segmentTemplate)->startNumber, 11);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate MultipleSegmentBaseType SegmentBaseType
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentBaseType)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate timescale=\"123456\""
+      "                     duration=\"1\""
+      "                     presentationTimeOffset=\"123456789\""
+      "                     indexRange=\"100-200\""
+      "                     indexRangeExact=\"true\">"
+      "    </SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+  segmentBase = GST_MPD_MULT_SEGMENT_BASE_NODE (segmentTemplate)->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 123456);
+  assert_equals_uint64 (segmentBase->presentationTimeOffset, 123456789);
+  assert_equals_uint64 (segmentBase->indexRange->first_byte_pos, 100);
+  assert_equals_uint64 (segmentBase->indexRange->last_byte_pos, 200);
+  assert_equals_int (segmentBase->indexRangeExact, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate MultipleSegmentBaseType SegmentTimeline
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentTimeline)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  GstMPDSegmentTimelineNode *segmentTimeline;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate>"
+      "      <SegmentTimeline>"
+      "      </SegmentTimeline></SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+
+  segmentTimeline = (GstMPDSegmentTimelineNode *)
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentTemplate)->SegmentTimeline;
+  fail_if (segmentTimeline == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate MultipleSegmentBaseType SegmentTimeline
+ * S attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentTimeline_s)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  GstMPDSegmentTimelineNode *segmentTimeline;
+  GstMPDSNode *sNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate>"
+      "      <SegmentTimeline>"
+      "        <S t=\"1\" d=\"2\" r=\"3\">"
+      "        </S></SegmentTimeline></SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+  segmentTimeline = (GstMPDSegmentTimelineNode *)
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentTemplate)->SegmentTimeline;
+  sNode = (GstMPDSNode *) g_queue_peek_head (&segmentTimeline->S);
+  assert_equals_uint64 (sNode->t, 1);
+  assert_equals_uint64 (sNode->d, 2);
+  assert_equals_uint64 (sNode->r, 3);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period SegmentTemplate MultipleSegmentBaseType
+ * BitstreamSwitching attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_bitstreamSwitching)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  GstMPDURLTypeNode *bitstreamSwitching;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate duration=\"1\">"
+      "      <BitstreamSwitching sourceURL=\"TestSourceURL\""
+      "                          range=\"100-200\">"
+      "      </BitstreamSwitching></SegmentTemplate></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentTemplate = periodNode->SegmentTemplate;
+  bitstreamSwitching =
+      GST_MPD_MULT_SEGMENT_BASE_NODE (segmentTemplate)->BitstreamSwitching;
+  assert_equals_string (bitstreamSwitching->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (bitstreamSwitching->range->first_byte_pos, 100);
+  assert_equals_uint64 (bitstreamSwitching->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet id=\"7\""
+      "                   group=\"8\""
+      "                   lang=\"en\""
+      "                   contentType=\"TestContentType\""
+      "                   par=\"4:3\""
+      "                   minBandwidth=\"100\""
+      "                   maxBandwidth=\"200\""
+      "                   minWidth=\"1000\""
+      "                   maxWidth=\"2000\""
+      "                   minHeight=\"1100\""
+      "                   maxHeight=\"2100\""
+      "                   minFrameRate=\"25/123\""
+      "                   maxFrameRate=\"26\""
+      "                   segmentAlignment=\"2\""
+      "                   subsegmentAlignment=\"false\""
+      "                   subsegmentStartsWithSAP=\"6\""
+      "                   bitstreamSwitching=\"false\">"
+      "    </AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  assert_equals_uint64 (adaptationSet->id, 7);
+  assert_equals_uint64 (adaptationSet->group, 8);
+  assert_equals_string (adaptationSet->lang, "en");
+  assert_equals_string (adaptationSet->contentType, "TestContentType");
+  assert_equals_uint64 (adaptationSet->par->num, 4);
+  assert_equals_uint64 (adaptationSet->par->den, 3);
+  assert_equals_uint64 (adaptationSet->minBandwidth, 100);
+  assert_equals_uint64 (adaptationSet->maxBandwidth, 200);
+  assert_equals_uint64 (adaptationSet->minWidth, 1000);
+  assert_equals_uint64 (adaptationSet->maxWidth, 2000);
+  assert_equals_uint64 (adaptationSet->minHeight, 1100);
+  assert_equals_uint64 (adaptationSet->maxHeight, 2100);
+  assert_equals_uint64 (GST_MPD_REPRESENTATION_BASE_NODE
+      (adaptationSet)->minFrameRate->num, 25);
+  assert_equals_uint64 (GST_MPD_REPRESENTATION_BASE_NODE
+      (adaptationSet)->minFrameRate->den, 123);
+  assert_equals_uint64 (GST_MPD_REPRESENTATION_BASE_NODE
+      (adaptationSet)->maxFrameRate->num, 26);
+  assert_equals_uint64 (GST_MPD_REPRESENTATION_BASE_NODE
+      (adaptationSet)->maxFrameRate->den, 1);
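+  /* segmentAlignment and subsegmentAlignment are conditional values: a
+     numeric attribute sets the flag and stores the number, while "false"
+     clears the flag and leaves the value at 0 */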
+  assert_equals_int (adaptationSet->segmentAlignment->flag, 1);
+  assert_equals_uint64 (adaptationSet->segmentAlignment->value, 2);
+  assert_equals_int (adaptationSet->subsegmentAlignment->flag, 0);
+  assert_equals_uint64 (adaptationSet->subsegmentAlignment->value, 0);
+  assert_equals_int (adaptationSet->subsegmentStartsWithSAP, 6);
+  assert_equals_int (adaptationSet->bitstreamSwitching, 0);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet RepresentationBase attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_representationBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet profiles=\"TestProfiles\""
+      "                   width=\"100\""
+      "                   height=\"200\""
+      "                   sar=\"10:20\""
+      "                   frameRate=\"30/40\""
+      "                   audioSamplingRate=\"TestAudioSamplingRate\""
+      "                   mimeType=\"TestMimeType\""
+      "                   segmentProfiles=\"TestSegmentProfiles\""
+      "                   codecs=\"TestCodecs\""
+      "                   maximumSAPPeriod=\"3.4\""
+      "                   startWithSAP=\"0\""
+      "                   maxPlayoutRate=\"1.2\""
+      "                   codingDependency=\"false\""
+      "                   scanType=\"progressive\">"
+      "    </AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  assert_equals_string (representationBase->profiles, "TestProfiles");
+  assert_equals_uint64 (representationBase->width, 100);
+  assert_equals_uint64 (representationBase->height, 200);
+  assert_equals_uint64 (representationBase->sar->num, 10);
+  assert_equals_uint64 (representationBase->sar->den, 20);
+  assert_equals_uint64 (representationBase->frameRate->num, 30);
+  assert_equals_uint64 (representationBase->frameRate->den, 40);
+  assert_equals_string (representationBase->audioSamplingRate,
+      "TestAudioSamplingRate");
+  assert_equals_string (representationBase->mimeType, "TestMimeType");
+  assert_equals_string (representationBase->segmentProfiles,
+      "TestSegmentProfiles");
+  assert_equals_string (representationBase->codecs, "TestCodecs");
+  assert_equals_float (representationBase->maximumSAPPeriod, 3.4);
+  assert_equals_int (representationBase->startWithSAP, GST_SAP_TYPE_0);
+  assert_equals_float (representationBase->maxPlayoutRate, 1.2);
+  assert_equals_int (representationBase->codingDependency, 0);
+  assert_equals_string (representationBase->scanType, "progressive");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet RepresentationBase FramePacking attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representationBase_framePacking) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  GstMPDDescriptorTypeNode *framePacking;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <FramePacking schemeIdUri=\"TestSchemeIdUri\""
+      "                    value=\"TestValue\">"
+      "      </FramePacking></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  framePacking =
+      (GstMPDDescriptorTypeNode *) representationBase->FramePacking->data;
+  assert_equals_string (framePacking->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (framePacking->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet RepresentationBase
+ * AudioChannelConfiguration attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representationBase_audioChannelConfiguration)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  GstMPDDescriptorTypeNode *audioChannelConfiguration;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <AudioChannelConfiguration schemeIdUri=\"TestSchemeIdUri\""
+      "                                 value=\"TestValue\">"
+      "      </AudioChannelConfiguration></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  audioChannelConfiguration = (GstMPDDescriptorTypeNode *)
+      representationBase->AudioChannelConfiguration->data;
+  assert_equals_string (audioChannelConfiguration->schemeIdUri,
+      "TestSchemeIdUri");
+  assert_equals_string (audioChannelConfiguration->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet RepresentationBase ContentProtection
+ * attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representationBase_contentProtection) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  GstMPDDescriptorTypeNode *contentProtection;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentProtection schemeIdUri=\"TestSchemeIdUri\""
+      "                         value=\"TestValue\">"
+      "      </ContentProtection></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  contentProtection =
+      (GstMPDDescriptorTypeNode *) representationBase->ContentProtection->data;
+  assert_equals_string (contentProtection->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (contentProtection->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing a ContentProtection element that has no value attribute
+ */
+GST_START_TEST (dash_mpdparser_contentProtection_no_value)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  GstMPDDescriptorTypeNode *contentProtection;
+  const gchar *xml =
+      "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     xmlns:mspr=\"urn:microsoft:playready\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentProtection schemeIdUri=\"urn:mpeg:dash:mp4protection:2011\" value=\"cenc\"/>"
+      "      <ContentProtection xmlns:mas=\"urn:marlin:mas:1-0:services:schemas:mpd\" schemeIdUri=\"urn:uuid:5e629af5-38da-4063-8977-97ffbd9902d4\">"
+      "              <mas:MarlinContentIds>"
+      "                <mas:MarlinContentId>urn:marlin:kid:02020202020202020202020202020202</mas:MarlinContentId>"
+      "       </mas:MarlinContentIds>"
+      "      </ContentProtection>"
+      "      <ContentProtection schemeIdUri=\"urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95\" value=\"MSPR 2.0\">"
+      "        <mspr:pro>dGVzdA==</mspr:pro>"
+      "     </ContentProtection>" "</AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+  gchar *str;
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  assert_equals_int (g_list_length (representationBase->ContentProtection), 3);
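+  /* check the second ContentProtection (the Marlin descriptor), which has
+     no value attribute in the manifest */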
+  contentProtection = (GstMPDDescriptorTypeNode *)
+      g_list_nth (representationBase->ContentProtection, 1)->data;
+  assert_equals_string (contentProtection->schemeIdUri,
+      "urn:uuid:5e629af5-38da-4063-8977-97ffbd9902d4");
+  fail_if (contentProtection->value == NULL);
+  /* We can't do a simple compare of value (which should be an XML dump
+     of the ContentProtection element), because the whitespace
+     formatting from xmlDump might differ between versions of libxml */
+  str = strstr (contentProtection->value, "<ContentProtection");
+  fail_if (str == NULL);
+  str = strstr (contentProtection->value, "<mas:MarlinContentIds>");
+  fail_if (str == NULL);
+  str = strstr (contentProtection->value, "<mas:MarlinContentId>");
+  fail_if (str == NULL);
+  str =
+      strstr (contentProtection->value,
+      "urn:marlin:kid:02020202020202020202020202020202");
+  fail_if (str == NULL);
+  str = strstr (contentProtection->value, "</ContentProtection>");
+  fail_if (str == NULL);
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing a ContentProtection element that has no value attribute,
+ * in a document whose XML declaration carries no encoding attribute
+ */
+GST_START_TEST (dash_mpdparser_contentProtection_no_value_no_encoding)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationBaseNode *representationBase;
+  GstMPDDescriptorTypeNode *contentProtection;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentProtection schemeIdUri=\"urn:mpeg:dash:mp4protection:2011\" value=\"cenc\"/>"
+      "      <ContentProtection xmlns:mas=\"urn:marlin:mas:1-0:services:schemas:mpd\" schemeIdUri=\"urn:uuid:5e629af5-38da-4063-8977-97ffbd9902d4\">"
+      "              <mas:MarlinContentIds>"
+      "                <mas:MarlinContentId>urn:marlin:kid:02020202020202020202020202020202</mas:MarlinContentId>"
+      "       </mas:MarlinContentIds>"
+      "     </ContentProtection>" "</AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representationBase = GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet);
+  assert_equals_int (g_list_length (representationBase->ContentProtection), 2);
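+  /* the second ContentProtection (Marlin) has no value attribute, but the
+     parser must still expose a non-NULL value */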
+  contentProtection = (GstMPDDescriptorTypeNode *)
+      g_list_nth (representationBase->ContentProtection, 1)->data;
+  assert_equals_string (contentProtection->schemeIdUri,
+      "urn:uuid:5e629af5-38da-4063-8977-97ffbd9902d4");
+  fail_if (contentProtection->value == NULL);
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Accessibility attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_accessibility)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDDescriptorTypeNode *accessibility;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Accessibility schemeIdUri=\"TestSchemeIdUri\""
+      "                     value=\"TestValue\">"
+      "      </Accessibility></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  accessibility =
+      (GstMPDDescriptorTypeNode *) adaptationSet->Accessibility->data;
+  assert_equals_string (accessibility->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (accessibility->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Role attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_role)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDDescriptorTypeNode *role;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Role schemeIdUri=\"TestSchemeIdUri\""
+      "            value=\"TestValue\">"
+      "      </Role></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  role = (GstMPDDescriptorTypeNode *) adaptationSet->Role->data;
+  assert_equals_string (role->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (role->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Rating attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_rating)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDDescriptorTypeNode *rating;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Rating schemeIdUri=\"TestSchemeIdUri\""
+      "              value=\"TestValue\">"
+      "      </Rating></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  rating = (GstMPDDescriptorTypeNode *) adaptationSet->Rating->data;
+  assert_equals_string (rating->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (rating->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Viewpoint attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_viewpoint)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDDescriptorTypeNode *viewpoint;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Viewpoint schemeIdUri=\"TestSchemeIdUri\""
+      "                 value=\"TestValue\">"
+      "      </Viewpoint></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  viewpoint = (GstMPDDescriptorTypeNode *) adaptationSet->Viewpoint->data;
+  assert_equals_string (viewpoint->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (viewpoint->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet ContentComponent attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_contentComponent)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDContentComponentNode *contentComponent;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentComponent id=\"1\""
+      "                        lang=\"en\""
+      "                        contentType=\"TestContentType\""
+      "                        par=\"10:20\">"
+      "      </ContentComponent></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  contentComponent = (GstMPDContentComponentNode *)
+      adaptationSet->ContentComponents->data;
+  assert_equals_uint64 (contentComponent->id, 1);
+  assert_equals_string (contentComponent->lang, "en");
+  assert_equals_string (contentComponent->contentType, "TestContentType");
+  assert_equals_uint64 (contentComponent->par->num, 10);
+  assert_equals_uint64 (contentComponent->par->den, 20);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet ContentComponent Accessibility attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_contentComponent_accessibility) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDContentComponentNode *contentComponent;
+  GstMPDDescriptorTypeNode *accessibility;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentComponent>"
+      "        <Accessibility schemeIdUri=\"TestSchemeIdUri\""
+      "                       value=\"TestValue\">"
+      "        </Accessibility>"
+      "      </ContentComponent></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  contentComponent = (GstMPDContentComponentNode *)
+      adaptationSet->ContentComponents->data;
+  accessibility =
+      (GstMPDDescriptorTypeNode *) contentComponent->Accessibility->data;
+  assert_equals_string (accessibility->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (accessibility->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet ContentComponent Role attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_contentComponent_role)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDContentComponentNode *contentComponent;
+  GstMPDDescriptorTypeNode *role;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentComponent>"
+      "        <Role schemeIdUri=\"TestSchemeIdUri\""
+      "              value=\"TestValue\">"
+      "        </Role></ContentComponent></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  contentComponent = (GstMPDContentComponentNode *)
+      adaptationSet->ContentComponents->data;
+  role = (GstMPDDescriptorTypeNode *) contentComponent->Role->data;
+  assert_equals_string (role->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (role->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet ContentComponent Rating attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_contentComponent_rating)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDContentComponentNode *contentComponent;
+  GstMPDDescriptorTypeNode *rating;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentComponent>"
+      "        <Rating schemeIdUri=\"TestSchemeIdUri\""
+      "                value=\"TestValue\">"
+      "        </Rating>"
+      "      </ContentComponent></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  contentComponent = (GstMPDContentComponentNode *)
+      adaptationSet->ContentComponents->data;
+  rating = (GstMPDDescriptorTypeNode *) contentComponent->Rating->data;
+  assert_equals_string (rating->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (rating->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet ContentComponent Viewpoint attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_contentComponent_viewpoint)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDContentComponentNode *contentComponent;
+  GstMPDDescriptorTypeNode *viewpoint;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <ContentComponent>"
+      "        <Viewpoint schemeIdUri=\"TestSchemeIdUri\""
+      "                   value=\"TestValue\">"
+      "        </Viewpoint>"
+      "      </ContentComponent></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  contentComponent = (GstMPDContentComponentNode *)
+      adaptationSet->ContentComponents->data;
+  viewpoint = (GstMPDDescriptorTypeNode *) contentComponent->Viewpoint->data;
+  assert_equals_string (viewpoint->schemeIdUri, "TestSchemeIdUri");
+  assert_equals_string (viewpoint->value, "TestValue");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet BaseURL attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_baseURL)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDBaseURLNode *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <BaseURL serviceLocation=\"TestServiceLocation\""
+      "               byteRange=\"TestByteRange\">TestBaseURL</BaseURL>"
+      "    </AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  baseURL = (GstMPDBaseURLNode *) adaptationSet->BaseURLs->data;
+  assert_equals_string (baseURL->baseURL, "TestBaseURL");
+  assert_equals_string (baseURL->serviceLocation, "TestServiceLocation");
+  assert_equals_string (baseURL->byteRange, "TestByteRange");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentBase attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_segmentBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentBase timescale=\"123456\""
+      "                   presentationTimeOffset=\"123456789\""
+      "                   indexRange=\"100-200\""
+      "                   indexRangeExact=\"true\">"
+      "      </SegmentBase></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentBase = adaptationSet->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 123456);
+  assert_equals_uint64 (segmentBase->presentationTimeOffset, 123456789);
+  assert_equals_uint64 (segmentBase->indexRange->first_byte_pos, 100);
+  assert_equals_uint64 (segmentBase->indexRange->last_byte_pos, 200);
+  assert_equals_int (segmentBase->indexRangeExact, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentBase Initialization attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_segmentBase_initialization)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDURLTypeNode *initialization;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentBase>"
+      "        <Initialisation sourceURL=\"TestSourceURL\""
+      "                        range=\"100-200\">"
+      "        </Initialisation></SegmentBase></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentBase = adaptationSet->SegmentBase;
+  initialization = segmentBase->Initialization;
+  assert_equals_string (initialization->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (initialization->range->first_byte_pos, 100);
+  assert_equals_uint64 (initialization->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentBase RepresentationIndex attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_segmentBase_representationIndex) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDURLTypeNode *representationIndex;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentBase>"
+      "        <RepresentationIndex sourceURL=\"TestSourceURL\""
+      "                             range=\"100-200\">"
+      "        </RepresentationIndex>"
+      "      </SegmentBase></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentBase = adaptationSet->SegmentBase;
+  representationIndex = segmentBase->RepresentationIndex;
+  assert_equals_string (representationIndex->sourceURL, "TestSourceURL");
+  assert_equals_uint64 (representationIndex->range->first_byte_pos, 100);
+  assert_equals_uint64 (representationIndex->range->last_byte_pos, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentList attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_segmentList)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentListNode *segmentList;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentList duration=\"1\"></SegmentList></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentList = adaptationSet->SegmentList;
+  fail_if (segmentList == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentTemplate attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_segmentTemplate)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentTemplate media=\"TestMedia\""
+      "                       duration=\"1\""
+      "                       index=\"TestIndex\""
+      "                       initialization=\"TestInitialization\""
+      "                       bitstreamSwitching=\"TestBitstreamSwitching\">"
+      "      </SegmentTemplate></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentTemplate = adaptationSet->SegmentTemplate;
+  assert_equals_string (segmentTemplate->media, "TestMedia");
+  assert_equals_string (segmentTemplate->index, "TestIndex");
+  assert_equals_string (segmentTemplate->initialization, "TestInitialization");
+  assert_equals_string (segmentTemplate->bitstreamSwitching,
+      "TestBitstreamSwitching");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
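+/*
+ * Test parsing Period AdaptationSet Representation SegmentTemplate
+ * attributes with inheritance from the Period SegmentTemplate
+ */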
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_segmentTemplate_inherit)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate media=\"ParentMedia\" duration=\"1\" "
+      "                     initialization=\"ParentInitialization\">"
+      "    </SegmentTemplate>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"5000\">"
+      "      <SegmentTemplate media=\"TestMedia\""
+      "                       index=\"TestIndex\""
+      "                       bitstreamSwitching=\"TestBitstreamSwitching\">"
+      "      </SegmentTemplate></Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation =
+      (GstMPDRepresentationNode *) adaptationSet->Representations->data;
+  segmentTemplate = representation->SegmentTemplate;
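+  /* media, index and bitstreamSwitching come from the Representation-level
+     SegmentTemplate; initialization is inherited from the Period-level one */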
+  assert_equals_string (segmentTemplate->media, "TestMedia");
+  assert_equals_string (segmentTemplate->index, "TestIndex");
+  assert_equals_string (segmentTemplate->initialization,
+      "ParentInitialization");
+  assert_equals_string (segmentTemplate->bitstreamSwitching,
+      "TestBitstreamSwitching");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
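+/*
+ * Test parsing Period AdaptationSet Representation SegmentBase attributes
+ * with inheritance from the Period SegmentBase
+ */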
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_segmentBase_inherit) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentBase timescale=\"123456\""
+      "                 presentationTimeOffset=\"123456789\""
+      "                 indexRange=\"100-200\""
+      "                 indexRangeExact=\"true\">"
+      "      <Initialisation sourceURL=\"TestSourceURL\""
+      "                      range=\"100-200\" />"
+      "    </SegmentBase>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"5000\">"
+      "      <SegmentBase>"
+      "      </SegmentBase></Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation =
+      (GstMPDRepresentationNode *) adaptationSet->Representations->data;
+  segmentBase = representation->SegmentBase;
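+  /* timescale is inherited from the Period-level SegmentBase */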
+  assert_equals_uint64 (segmentBase->timescale, 123456);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SegmentTemplate
+ * attributes with inheritance from the AdaptationSet SegmentTemplate
+ */
+GST_START_TEST (dash_mpdparser_adapt_repr_segmentTemplate_inherit)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period duration=\"PT0H5M0.000S\">"
+      "    <AdaptationSet maxWidth=\"1280\" maxHeight=\"720\" maxFrameRate=\"50\">"
+      "      <SegmentTemplate initialization=\"set1_init.mp4\"/>"
+      "      <Representation id=\"1\" mimeType=\"video/mp4\" codecs=\"avc1.640020\" "
+      "          width=\"1280\" height=\"720\" frameRate=\"50\" bandwidth=\"30000\">"
+      "        <SegmentTemplate timescale=\"12800\" media=\"track1_$Number$.m4s\" startNumber=\"1\" duration=\"25600\"/>"
+      "  </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  segmentTemplate = representation->SegmentTemplate;
+  fail_if (segmentTemplate == NULL);
+  segmentBase = GST_MPD_MULT_SEGMENT_BASE_NODE (segmentTemplate)->SegmentBase;
+
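+  /* timescale, duration, startNumber and media come from the
+     Representation-level SegmentTemplate; initialization is inherited from
+     the AdaptationSet-level one */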
+  assert_equals_uint64 (segmentBase->timescale, 12800);
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (segmentTemplate)->duration, 25600);
+  assert_equals_uint64 (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (segmentTemplate)->startNumber, 1);
+  assert_equals_string (segmentTemplate->media, "track1_$Number$.m4s");
+  assert_equals_string (segmentTemplate->initialization, "set1_init.mp4");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet SegmentTemplate attributes with
+ * inheritance
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_segmentTemplate_inherit)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <SegmentTemplate media=\"ParentMedia\" duration=\"1\" "
+      "                     initialization=\"ParentInitialization\">"
+      "    </SegmentTemplate>"
+      "    <AdaptationSet>"
+      "      <SegmentTemplate media=\"TestMedia\""
+      "                       duration=\"1\""
+      "                       index=\"TestIndex\""
+      "                       bitstreamSwitching=\"TestBitstreamSwitching\">"
+      "      </SegmentTemplate></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  segmentTemplate = adaptationSet->SegmentTemplate;
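+  /* initialization is inherited from the Period-level SegmentTemplate; the
+     other attributes come from the AdaptationSet-level one */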
+  assert_equals_string (segmentTemplate->media, "TestMedia");
+  assert_equals_string (segmentTemplate->index, "TestIndex");
+  assert_equals_string (segmentTemplate->initialization,
+      "ParentInitialization");
+  assert_equals_string (segmentTemplate->bitstreamSwitching,
+      "TestBitstreamSwitching");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_representation)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"Test_Id\""
+      "                      bandwidth=\"100\""
+      "                      qualityRanking=\"200\""
+      "                      dependencyId=\"one two three\""
+      "                      mediaStreamStructureId=\"\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  assert_equals_string (representation->id, "Test_Id");
+  assert_equals_uint64 (representation->bandwidth, 100);
+  assert_equals_uint64 (representation->qualityRanking, 200);
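+  /* dependencyId is split on whitespace into a NULL-terminated string
+     vector; an empty mediaStreamStructureId yields an empty vector */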
+  assert_equals_string (representation->dependencyId[0], "one");
+  assert_equals_string (representation->dependencyId[1], "two");
+  assert_equals_string (representation->dependencyId[2], "three");
+  assert_equals_pointer (representation->dependencyId[3], NULL);
+  assert_equals_pointer (representation->mediaStreamStructureId[0], NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation RepresentationBaseType attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_representationBase) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+
+  fail_if (representation == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation BaseURL attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_representation_baseURL)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDBaseURLNode *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <BaseURL serviceLocation=\"TestServiceLocation\""
+      "                 byteRange=\"TestByteRange\">TestBaseURL</BaseURL>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  baseURL = (GstMPDBaseURLNode *) representation->BaseURLs->data;
+  assert_equals_string (baseURL->baseURL, "TestBaseURL");
+  assert_equals_string (baseURL->serviceLocation, "TestServiceLocation");
+  assert_equals_string (baseURL->byteRange, "TestByteRange");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SubRepresentation attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_subRepresentation) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSubRepresentationNode *subRepresentation;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SubRepresentation level=\"100\""
+      "                           dependencyLevel=\"1 2 3\""
+      "                           bandwidth=\"200\""
+      "                           contentComponent=\"content1 content2\">"
+      "        </SubRepresentation>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  subRepresentation = (GstMPDSubRepresentationNode *)
+      representation->SubRepresentations->data;
+  assert_equals_uint64 (subRepresentation->level, 100);
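+  /* dependencyLevel is a space-separated list parsed into an array with an
+     explicit size; contentComponent is a NULL-terminated string vector */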
+  assert_equals_uint64 (subRepresentation->dependencyLevel_size, 3);
+  assert_equals_uint64 (subRepresentation->dependencyLevel[0], 1);
+  assert_equals_uint64 (subRepresentation->dependencyLevel[1], 2);
+  assert_equals_uint64 (subRepresentation->dependencyLevel[2], 3);
+  assert_equals_uint64 (subRepresentation->bandwidth, 200);
+  assert_equals_string (subRepresentation->contentComponent[0], "content1");
+  assert_equals_string (subRepresentation->contentComponent[1], "content2");
+  assert_equals_pointer (subRepresentation->contentComponent[2], NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SubRepresentation
+ * RepresentationBase attributes
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_subRepresentation_representationBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSubRepresentationNode *subRepresentation;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SubRepresentation>"
+      "        </SubRepresentation>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  subRepresentation = (GstMPDSubRepresentationNode *)
+      representation->SubRepresentations->data;
+
+  fail_if (subRepresentation == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SegmentBase attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_representation_segmentBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentBaseNode *segmentBase;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentBase>"
+      "        </SegmentBase>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  segmentBase = representation->SegmentBase;
+  fail_if (segmentBase == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SegmentList attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_adaptationSet_representation_segmentList)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentListNode *segmentList;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList duration=\"1\">"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  segmentList = representation->SegmentList;
+  fail_if (segmentList == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period AdaptationSet Representation SegmentTemplate attributes
+ *
+ */
+GST_START_TEST
+    (dash_mpdparser_period_adaptationSet_representation_segmentTemplate) {
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSegmentTemplateNode *segmentTemplate;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentTemplate duration=\"1\">"
+      "        </SegmentTemplate>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  segmentTemplate = representation->SegmentTemplate;
+  fail_if (segmentTemplate == NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing Period Subset attributes
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_subset)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSubsetNode *subset;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period><Subset contains=\"1 2 3\"></Subset></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  subset = (GstMPDSubsetNode *) periodNode->Subsets->data;
+  assert_equals_uint64 (subset->contains_size, 3);
+  assert_equals_uint64 (subset->contains[0], 1);
+  assert_equals_uint64 (subset->contains[1], 2);
+  assert_equals_uint64 (subset->contains[2], 3);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing UTCTiming elements
+ *
+ */
+GST_START_TEST (dash_mpdparser_utctiming)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      " profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "<UTCTiming schemeIdUri=\"urn:mpeg:dash:utc:http-xsdate:2014\" value=\"http://time.akamai.com/?iso http://example.time/xsdate\"/>"
+      "<UTCTiming schemeIdUri=\"urn:mpeg:dash:utc:direct:2014\" value=\"2002-05-30T09:30:10Z \"/>"
+      "<UTCTiming schemeIdUri=\"urn:mpeg:dash:utc:ntp:2014\" value=\"0.europe.pool.ntp.org 1.europe.pool.ntp.org 2.europe.pool.ntp.org 3.europe.pool.ntp.org\"/>"
+      "</MPD>";
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+  GstMPDUTCTimingType selected_method;
+  gchar **urls;
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+
+  assert_equals_int (ret, TRUE);
+  fail_if (mpdclient->mpd_root_node == NULL);
+  fail_if (mpdclient->mpd_root_node->UTCTimings == NULL);
+  assert_equals_int (g_list_length (mpdclient->mpd_root_node->UTCTimings), 3);
+  urls =
+      gst_mpd_client2_get_utc_timing_sources (mpdclient,
+      GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE, &selected_method);
+  fail_if (urls == NULL);
+  assert_equals_int (selected_method, GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE);
+  assert_equals_int (g_strv_length (urls), 2);
+  assert_equals_string (urls[0], "http://time.akamai.com/?iso");
+  assert_equals_string (urls[1], "http://example.time/xsdate");
+  urls =
+      gst_mpd_client2_get_utc_timing_sources (mpdclient,
+      GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE | GST_MPD_UTCTIMING_TYPE_HTTP_ISO,
+      &selected_method);
+  fail_if (urls == NULL);
+  assert_equals_int (selected_method, GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE);
+  urls =
+      gst_mpd_client2_get_utc_timing_sources (mpdclient,
+      GST_MPD_UTCTIMING_TYPE_DIRECT, NULL);
+  fail_if (urls == NULL);
+  assert_equals_int (g_strv_length (urls), 1);
+  assert_equals_string (urls[0], "2002-05-30T09:30:10Z ");
+  urls =
+      gst_mpd_client2_get_utc_timing_sources (mpdclient,
+      GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE | GST_MPD_UTCTIMING_TYPE_DIRECT,
+      &selected_method);
+  fail_if (urls == NULL);
+  assert_equals_int (selected_method, GST_MPD_UTCTIMING_TYPE_HTTP_XSDATE);
+  urls =
+      gst_mpd_client2_get_utc_timing_sources (mpdclient,
+      GST_MPD_UTCTIMING_TYPE_NTP, &selected_method);
+  fail_if (urls == NULL);
+  assert_equals_int (selected_method, GST_MPD_UTCTIMING_TYPE_NTP);
+  assert_equals_int (g_strv_length (urls), 4);
+  assert_equals_string (urls[0], "0.europe.pool.ntp.org");
+  assert_equals_string (urls[1], "1.europe.pool.ntp.org");
+  assert_equals_string (urls[2], "2.europe.pool.ntp.org");
+  assert_equals_string (urls[3], "3.europe.pool.ntp.org");
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing invalid UTCTiming values:
+ * - elements with no schemeIdUri property should be rejected
+ * - elements with no value property should be rejected
+ * - elements with unrecognised UTCTiming scheme should be rejected
+ * - elements with empty values should be rejected
+ *
+ */
+GST_START_TEST (dash_mpdparser_utctiming_invalid_value)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      " profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "<UTCTiming invalid_schemeIdUri=\"dummy.uri.scheme\" value=\"dummy value\"/>"
+      "<UTCTiming schemeIdUri=\"urn:mpeg:dash:utc:ntp:2014\" invalid_value=\"dummy value\"/>"
+      "<UTCTiming schemeIdUri=\"dummy.uri.scheme\" value=\"dummy value\"/>"
+      "<UTCTiming schemeIdUri=\"urn:mpeg:dash:utc:ntp:2014\" value=\"\"/>"
+      "</MPD>";
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+
+  assert_equals_int (ret, TRUE);
+  fail_if (mpdclient->mpd_root_node == NULL);
+  fail_if (mpdclient->mpd_root_node->UTCTimings != NULL);
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
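+
+/* A minimal sketch of the rejection rules listed above, for illustration
+ * only: it is not the code the MPD parser uses and the helper name is made
+ * up. A UTCTiming element is only kept when it carries a recognised
+ * schemeIdUri, has a value attribute, and that value is not empty. Only the
+ * schemes exercised by these tests are treated as recognised here. */
+static G_GNUC_UNUSED gboolean
+example_utctiming_is_usable (const gchar * scheme_id_uri, const gchar * value)
+{
+  if (scheme_id_uri == NULL || value == NULL || *value == '\0')
+    return FALSE;
+  /* recognised timing schemes (subset used in the tests above) */
+  return g_str_equal (scheme_id_uri, "urn:mpeg:dash:utc:http-xsdate:2014")
+      || g_str_equal (scheme_id_uri, "urn:mpeg:dash:utc:direct:2014")
+      || g_str_equal (scheme_id_uri, "urn:mpeg:dash:utc:ntp:2014");
+}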
+
+/*
+ * Test parsing the type property: value "dynamic"
+ *
+ */
+GST_START_TEST (dash_mpdparser_type_dynamic)
+{
+  gboolean isLive;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD type=\"dynamic\" xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\"> </MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  isLive = gst_mpd_client2_is_live (mpdclient);
+  assert_equals_int (isLive, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Validate gst_mpdparser_build_URL_from_template function
+ *
+ */
+GST_START_TEST (dash_mpdparser_template_parsing)
+{
+  const gchar *id = "TestId";
+  guint number = 7;
+  guint bandwidth = 2500;
+  guint64 time = 100;
+  gchar *result;
+
+  struct TestUrl
+  {
+    const gchar *urlTemplate;
+    const gchar *expectedResponse;
+  };
+
+  /* various test scenarios to attempt */
+  struct TestUrl testUrl[] = {
+    {"", NULL},                 /* empty string for template */
+    {"$$", "$"},                /* escaped $ */
+    {"Number", "Number"},       /* string similar with an identifier, but without $ */
+    {"Number$Number$", "Number7"},      /* Number identifier */
+    {"Number$Number$$$", "Number7$"},   /* Number identifier followed by $$ */
+    {"Number$Number$Number$Number$", "Number7Number7"}, /* series of "Number" string and Number identifier */
+    {"Representation$RepresentationID$", "RepresentationTestId"},       /* RepresentationID identifier */
+    {"TestMedia$Bandwidth$$$test", "TestMedia2500$test"},       /* Bandwidth identifier */
+    {"TestMedia$Time$", "TestMedia100"},        /* Time identifier */
+    {"TestMedia$Time", NULL},   /* Identifier not finished with $ */
+    {"Time$Time%d$", NULL},     /* usage of %d (no width) */
+    {"Time$Time%0d$", "Time100"},       /* usage of format smaller than number of digits */
+    {"Time$Time%01d$", "Time100"},      /* usage of format smaller than number of digits */
+    {"Time$Time%05d$", "Time00100"},    /* usage of format bigger than number of digits */
+    {"Time$Time%05dtest$", "Time00100test"},    /* usage extra text in format */
+    {"Time$Time%3d$", NULL},    /* incorrect format: width does not start with 0 */
+    {"Time$Time%0-4d$", NULL},  /* incorrect format: width is not a number */
+    {"Time$Time%0$", NULL},     /* incorrect format: no d, x or u */
+    {"Time$Time1%01d$", NULL},  /* incorrect format: does not start with % after identifier */
+    {"$Bandwidth%/init.mp4v", NULL},    /* incorrect identifier: not finished with $ */
+    {"$Number%/$Time$.mp4v", NULL},     /* incorrect number of $ separators */
+    {"$RepresentationID1$", NULL},      /* incorrect identifier */
+    {"$Bandwidth1$", NULL},     /* incorrect identifier */
+    {"$Number1$", NULL},        /* incorrect identifier */
+    {"$RepresentationID%01d$", NULL},   /* incorrect format: RepresentationID does not support formatting */
+    {"Time$Time%05u$", NULL},   /* %u format */
+    {"Time$Time%05x$", NULL},   /* %x format */
+    {"Time$Time%05utest$", NULL},       /* %u format followed by text */
+    {"Time$Time%05xtest$", NULL},       /* %x format followed by text */
+    {"Time$Time%05xtest%$", NULL},      /* second % character in format */
+  };
+
+  guint count = sizeof (testUrl) / sizeof (testUrl[0]);
+  gint i;
+
+  for (i = 0; i < count; i++) {
+    result =
+        gst_mpdparser_build_URL_from_template (testUrl[i].urlTemplate, id,
+        number, bandwidth, time);
+    assert_equals_string (result, testUrl[i].expectedResponse);
+    g_free (result);
+  }
+}
+
+GST_END_TEST;
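+
+/* The table above encodes the substitution rules for the template
+ * identifiers $RepresentationID$, $Number$, $Bandwidth$ and $Time$, the "$$"
+ * escape and the %0Nd width formats. The sketch below is purely
+ * illustrative: it only handles the unformatted identifiers and the "$$"
+ * escape, ignores the %0Nd width cases and the empty-template case, and is
+ * not the real gst_mpdparser_build_URL_from_template(). It assumes
+ * <string.h> is available (strlen() is already used in this file). */
+static G_GNUC_UNUSED gchar *
+example_expand_template (const gchar * tmpl, const gchar * rep_id,
+    guint number, guint bandwidth, guint64 time_val)
+{
+  GString *out = g_string_new (NULL);
+  const gchar *p = tmpl;
+
+  while (*p != '\0') {
+    const gchar *dollar = strchr (p, '$');
+    const gchar *end;
+    gchar *id;
+
+    if (dollar == NULL) {
+      /* no identifier left, copy the remainder verbatim */
+      g_string_append (out, p);
+      break;
+    }
+    g_string_append_len (out, p, dollar - p);
+    p = dollar + 1;
+    if (*p == '$') {
+      /* "$$" is an escaped dollar sign */
+      g_string_append_c (out, '$');
+      p++;
+      continue;
+    }
+    end = strchr (p, '$');
+    if (end == NULL)
+      goto invalid;             /* identifier not terminated with '$' */
+    id = g_strndup (p, end - p);
+    if (g_str_equal (id, "RepresentationID"))
+      g_string_append (out, rep_id);
+    else if (g_str_equal (id, "Number"))
+      g_string_append_printf (out, "%u", number);
+    else if (g_str_equal (id, "Bandwidth"))
+      g_string_append_printf (out, "%u", bandwidth);
+    else if (g_str_equal (id, "Time"))
+      g_string_append_printf (out, "%" G_GUINT64_FORMAT, time_val);
+    else {
+      g_free (id);
+      goto invalid;             /* unknown identifier */
+    }
+    g_free (id);
+    p = end + 1;
+  }
+  return g_string_free (out, FALSE);
+
+invalid:
+  g_string_free (out, TRUE);
+  return NULL;
+}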
+
+/*
+ * Test handling isoff ondemand profile
+ *
+ */
+GST_START_TEST (dash_mpdparser_isoff_ondemand_profile)
+{
+  gboolean hasOnDemandProfile;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-on-demand:2011\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  hasOnDemandProfile = gst_mpd_client2_has_isoff_ondemand_profile (mpdclient);
+  assert_equals_int (hasOnDemandProfile, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling GstDateTime
+ *
+ */
+GST_START_TEST (dash_mpdparser_GstDateTime)
+{
+  gint64 delta;
+  GstDateTime *time1;
+  GstDateTime *time2;
+  GstDateTime *time3;
+  GDateTime *g_time2;
+  GDateTime *g_time3;
+
+  time1 = gst_date_time_new_from_iso8601_string ("2012-06-23T23:30:59Z");
+  time2 = gst_date_time_new_from_iso8601_string ("2012-06-23T23:31:00Z");
+
+  delta = gst_mpd_client2_calculate_time_difference (time1, time2);
+  assert_equals_int64 (delta, 1 * GST_SECOND);
+
+  time3 = gst_mpd_client2_add_time_difference (time1, delta);
+
+  /* convert to GDateTime in order to compare time2 and time3 */
+  g_time2 = gst_date_time_to_g_date_time (time2);
+  g_time3 = gst_date_time_to_g_date_time (time3);
+  fail_if (g_date_time_compare (g_time2, g_time3) != 0);
+
+  gst_date_time_unref (time1);
+  gst_date_time_unref (time2);
+  gst_date_time_unref (time3);
+  g_date_time_unref (g_time2);
+  g_date_time_unref (g_time3);
+}
+
+GST_END_TEST;
+
+/*
+ * Test bitstreamSwitching inheritance from Period to AdaptationSet
+ *
+ * Description of the bitstreamSwitching attribute in Period:
+ * "When set to 'true', this is equivalent as if the
+ * AdaptationSet@bitstreamSwitching for each Adaptation Set contained in this
+ * Period is set to 'true'. In this case, the AdaptationSet@bitstreamSwitching
+ * attribute shall not be set to 'false' for any Adaptation Set in this Period"
+ *
+ */
+GST_START_TEST (dash_mpdparser_bitstreamSwitching_inheritance)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  guint activeStreams;
+  GstActiveStream *activeStream;
+  GstCaps *caps;
+  GstStructure *s;
+  gboolean bitstreamSwitchingFlag;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\""
+      "          duration=\"P0Y0M1DT1H1M1S\""
+      "          bitstreamSwitching=\"true\">"
+      "    <AdaptationSet id=\"1\""
+      "                   mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation>"
+      "    </AdaptationSet>"
+      "    <AdaptationSet id=\"2\""
+      "                   mimeType=\"audio\""
+      "                   bitstreamSwitching=\"false\">"
+      "      <Representation id=\"2\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* setup streaming from the second adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 1);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* 2 active streams */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 2);
+
+  /* get details of the first active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  assert_equals_int (activeStream->mimeType, GST_STREAM_VIDEO);
+  caps = gst_mpd_client2_get_stream_caps (activeStream);
+  fail_unless (caps != NULL);
+  s = gst_caps_get_structure (caps, 0);
+  assert_equals_string (gst_structure_get_name (s), "video/quicktime");
+  gst_caps_unref (caps);
+
+  /* inherited from Period's bitstreamSwitching */
+  bitstreamSwitchingFlag =
+      gst_mpd_client2_get_bitstream_switching_flag (activeStream);
+  assert_equals_int (bitstreamSwitchingFlag, TRUE);
+
+  /* get details of the second active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 1);
+  fail_if (activeStream == NULL);
+
+  assert_equals_int (activeStream->mimeType, GST_STREAM_AUDIO);
+  caps = gst_mpd_client2_get_stream_caps (activeStream);
+  fail_unless (caps != NULL);
+  s = gst_caps_get_structure (caps, 0);
+  assert_equals_string (gst_structure_get_name (s), "audio");
+  gst_caps_unref (caps);
+
+  /* set to FALSE in our example, but overwritten to TRUE by Period's
+   * bitstreamSwitching
+   */
+  bitstreamSwitchingFlag =
+      gst_mpd_client2_get_bitstream_switching_flag (activeStream);
+  assert_equals_int (bitstreamSwitchingFlag, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
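+
+/* Purely illustrative: the inheritance rule quoted above boils down to the
+ * Period's flag forcing the AdaptationSet's flag. The helper below is not
+ * part of the parser; it only restates the rule checked by the test above. */
+static G_GNUC_UNUSED gboolean
+example_effective_bitstream_switching (gboolean period_flag,
+    gboolean adaptation_set_flag)
+{
+  /* Period@bitstreamSwitching="true" forces "true" for every contained set */
+  return period_flag || adaptation_set_flag;
+}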
+
+/*
+ * Test various duration formats
+ */
+GST_START_TEST (dash_mpdparser_various_duration_formats)
+{
+  GstMPDPeriodNode *periodNode;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P100Y\">"
+      "  <Period id=\"Period0\" start=\"PT1S\"></Period>"
+      "  <Period id=\"Period1\" start=\"PT1.5S\"></Period>"
+      "  <Period id=\"Period2\" start=\"PT1,7S\"></Period>"
+      "  <Period id=\"Period3\" start=\"PT1M\"></Period>"
+      "  <Period id=\"Period4\" start=\"PT1H\"></Period>"
+      "  <Period id=\"Period5\" start=\"P1D\"></Period>"
+      "  <Period id=\"Period6\" start=\"P1M\"></Period>"
+      "  <Period id=\"Period7\" start=\"P1Y\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      0);
+  assert_equals_string (periodNode->id, "Period0");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 0, 0, 0, 1, 0));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      1);
+  assert_equals_string (periodNode->id, "Period1");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 0, 0, 0, 1, 500));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      2);
+  assert_equals_string (periodNode->id, "Period2");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 0, 0, 0, 1, 700));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      3);
+  assert_equals_string (periodNode->id, "Period3");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 0, 0, 1, 0, 0));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      4);
+  assert_equals_string (periodNode->id, "Period4");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 0, 1, 0, 0, 0));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      5);
+  assert_equals_string (periodNode->id, "Period5");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 0, 1, 0, 0, 0, 0));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      6);
+  assert_equals_string (periodNode->id, "Period6");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (0, 1, 0, 0, 0, 0, 0));
+
+  periodNode =
+      (GstMPDPeriodNode *) g_list_nth_data (mpdclient->mpd_root_node->Periods,
+      7);
+  assert_equals_string (periodNode->id, "Period7");
+  assert_equals_uint64 (periodNode->start,
+      duration_to_ms (1, 0, 0, 0, 0, 0, 0));
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test media presentation setup
+ *
+ */
+GST_START_TEST (dash_mpdparser_setup_media_presentation)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\""
+      "          duration=\"P0Y0M1DT1H1M1S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test setting up streaming
+ *
+ */
+GST_START_TEST (dash_mpdparser_setup_streaming)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\""
+      "          duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\""
+      "                   mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the first adaptation set of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+  adapt_set = (GstMPDAdaptationSetNode *) adaptationSets->data;
+  fail_if (adapt_set == NULL);
+
+  /* setup streaming from the adaptation set */
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling Period selection
+ *
+ */
+GST_START_TEST (dash_mpdparser_period_selection)
+{
+  const gchar *periodName;
+  guint periodIndex;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     mediaPresentationDuration=\"P0Y0M1DT1H4M3S\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\"></Period>"
+      "  <Period id=\"Period1\"></Period>"
+      "  <Period id=\"Period2\" start=\"P0Y0M1DT1H3M3S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* period_idx should be 0 and we should have no active periods */
+  assert_equals_uint64 (mpdclient->period_idx, 0);
+  fail_unless (mpdclient->periods == NULL);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* check the periods */
+  fail_unless (mpdclient->periods != NULL);
+  periodName = gst_mpd_client2_get_period_id (mpdclient);
+  assert_equals_string (periodName, "Period0");
+
+  ret = gst_mpd_client2_set_period_index (mpdclient, 1);
+  assert_equals_int (ret, TRUE);
+  periodName = gst_mpd_client2_get_period_id (mpdclient);
+  assert_equals_string (periodName, "Period1");
+
+  ret = gst_mpd_client2_set_period_index (mpdclient, 2);
+  assert_equals_int (ret, TRUE);
+  periodName = gst_mpd_client2_get_period_id (mpdclient);
+  assert_equals_string (periodName, "Period2");
+
+  ret = gst_mpd_client2_has_next_period (mpdclient);
+  assert_equals_int (ret, FALSE);
+  ret = gst_mpd_client2_has_previous_period (mpdclient);
+  assert_equals_int (ret, TRUE);
+
+  ret = gst_mpd_client2_set_period_index (mpdclient, 0);
+  assert_equals_int (ret, TRUE);
+  ret = gst_mpd_client2_has_next_period (mpdclient);
+  assert_equals_int (ret, TRUE);
+  ret = gst_mpd_client2_has_previous_period (mpdclient);
+  assert_equals_int (ret, FALSE);
+
+  ret = gst_mpd_client2_set_period_id (mpdclient, "Period1");
+  assert_equals_int (ret, TRUE);
+  periodIndex = gst_mpd_client2_get_period_index (mpdclient);
+  assert_equals_uint64 (periodIndex, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling Period selection based on time
+ *
+ */
+GST_START_TEST (dash_mpdparser_get_period_at_time)
+{
+  guint periodIndex;
+  GstDateTime *time;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M1DT1H4M3S\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\"></Period>"
+      "  <Period id=\"Period1\"></Period>"
+      "  <Period id=\"Period2\" start=\"P0Y0M1DT1H3M3S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* request period for a time before availabilityStartTime, expect period index 0 */
+  time = gst_date_time_new_from_iso8601_string ("2015-03-23T23:30:59Z");
+  periodIndex = gst_mpd_client2_get_period_index_at_time (mpdclient, time);
+  gst_date_time_unref (time);
+  assert_equals_int (periodIndex, 0);
+
+  /* request period for a time from period 0 */
+  time = gst_date_time_new_from_iso8601_string ("2015-03-24T23:30:59Z");
+  periodIndex = gst_mpd_client2_get_period_index_at_time (mpdclient, time);
+  gst_date_time_unref (time);
+  assert_equals_int (periodIndex, 0);
+
+  /* request period for a time from period 1 */
+  time = gst_date_time_new_from_iso8601_string ("2015-03-25T1:1:1Z");
+  periodIndex = gst_mpd_client2_get_period_index_at_time (mpdclient, time);
+  gst_date_time_unref (time);
+  assert_equals_int (periodIndex, 1);
+
+  /* request period for a time from period 2 */
+  time = gst_date_time_new_from_iso8601_string ("2015-03-25T1:3:3Z");
+  periodIndex = gst_mpd_client2_get_period_index_at_time (mpdclient, time);
+  gst_date_time_unref (time);
+  assert_equals_int (periodIndex, 2);
+
+  /* request period for a time after mediaPresentationDuration, expect period index G_MAXUINT */
+  time = gst_date_time_new_from_iso8601_string ("2015-03-25T1:4:3Z");
+  periodIndex = gst_mpd_client2_get_period_index_at_time (mpdclient, time);
+  gst_date_time_unref (time);
+  assert_equals_int (periodIndex, G_MAXUINT);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
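+
+/* A minimal sketch, under simplifying assumptions, of the lookup exercised
+ * by the test above: given the offset of a wall-clock time from
+ * availabilityStartTime (clamped to 0 for earlier times) and the sorted
+ * start times of the periods, times before the first period map to index 0
+ * and times at or past mediaPresentationDuration map to G_MAXUINT. This is
+ * not the parser's implementation; all names below are made up. */
+static G_GNUC_UNUSED guint
+example_period_index_for_offset (GstClockTime offset,
+    const GstClockTime * period_starts, guint n_periods,
+    GstClockTime presentation_duration)
+{
+  guint idx = 0;
+  guint i;
+
+  if (offset >= presentation_duration)
+    return G_MAXUINT;
+  for (i = 0; i < n_periods; i++) {
+    if (period_starts[i] <= offset)
+      idx = i;                  /* last period that has started at 'offset' */
+  }
+  return idx;
+}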
+
+/*
+ * Test handling Adaptation sets
+ *
+ */
+GST_START_TEST (dash_mpdparser_adaptationSet_handling)
+{
+  const gchar *periodName;
+  guint adaptation_sets_count;
+  GList *adaptationSets, *it;
+  guint count = 0;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\"></AdaptationSet>"
+      "  </Period>"
+      "  <Period id=\"Period1\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"10\"></AdaptationSet>"
+      "    <AdaptationSet id=\"11\"></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* period0 has 1 adaptation set */
+  fail_unless (mpdclient->periods != NULL);
+  periodName = gst_mpd_client2_get_period_id (mpdclient);
+  assert_equals_string (periodName, "Period0");
+  adaptation_sets_count = gst_mpd_client2_get_nb_adaptationSet (mpdclient);
+  assert_equals_int (adaptation_sets_count, 1);
+
+  /* period1 has 2 adaptation sets */
+  ret = gst_mpd_client2_set_period_id (mpdclient, "Period1");
+  assert_equals_int (ret, TRUE);
+  adaptation_sets_count = gst_mpd_client2_get_nb_adaptationSet (mpdclient);
+  assert_equals_int (adaptation_sets_count, 2);
+
+  /* check the id for the 2 adaptation sets from period 1 */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  for (it = adaptationSets; it; it = g_list_next (it)) {
+    GstMPDAdaptationSetNode *adapt_set;
+    adapt_set = (GstMPDAdaptationSetNode *) it->data;
+    fail_if (adapt_set == NULL);
+
+    assert_equals_int (adapt_set->id, 10 + count);
+    count++;
+  }
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling Representation selection
+ *
+ */
+GST_START_TEST (dash_mpdparser_representation_selection)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adaptationSetNode;
+  GList *representations;
+  gint representationIndex;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"video/mp4\">"
+      "      <Representation id=\"v0\" bandwidth=\"500000\"></Representation>"
+      "      <Representation id=\"v1\" bandwidth=\"250000\"></Representation>"
+      "    </AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  adaptationSetNode = adaptationSets->data;
+  fail_if (adaptationSetNode == NULL);
+  assert_equals_int (adaptationSetNode->id, 1);
+
+  representations = adaptationSetNode->Representations;
+  fail_if (representations == NULL);
+
+  representationIndex =
+      gst_mpd_client2_get_rep_idx_with_min_bandwidth (representations);
+  assert_equals_int (representationIndex, 1);
+
+  representationIndex =
+      gst_mpd_client2_get_rep_idx_with_max_bandwidth (representations, 0, 0, 0,
+      0, 1);
+  assert_equals_int (representationIndex, 1);
+
+  representationIndex =
+      gst_mpd_client2_get_rep_idx_with_max_bandwidth (representations, 100000,
+      0, 0, 0, 1);
+  assert_equals_int (representationIndex, -1);
+
+  representationIndex =
+      gst_mpd_client2_get_rep_idx_with_max_bandwidth (representations, 300000,
+      0, 0, 0, 1);
+  assert_equals_int (representationIndex, 1);
+
+  representationIndex =
+      gst_mpd_client2_get_rep_idx_with_max_bandwidth (representations, 500000,
+      0, 0, 0, 1);
+  assert_equals_int (representationIndex, 0);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling Active stream selection
+ *
+ */
+GST_START_TEST (dash_mpdparser_activeStream_selection)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  guint activeStreams;
+  GstActiveStream *activeStream;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation>"
+      "    </AdaptationSet>"
+      "    <AdaptationSet id=\"2\" mimeType=\"audio\">"
+      "      <Representation id=\"2\" bandwidth=\"250000\">"
+      "      </Representation>"
+      "    </AdaptationSet>"
+      "    <AdaptationSet id=\"3\" mimeType=\"application\">"
+      "      <Representation id=\"3\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* no active streams yet */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 0);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* 1 active stream */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 1);
+
+  /* setup streaming from the second adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 1);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* 2 active streams */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 2);
+
+  /* setup streaming from the third adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 2);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* 3 active streams */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 3);
+
+  /* get details of the first active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+  assert_equals_int (activeStream->mimeType, GST_STREAM_VIDEO);
+
+  /* get details of the second active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 1);
+  fail_if (activeStream == NULL);
+  assert_equals_int (activeStream->mimeType, GST_STREAM_AUDIO);
+
+  /* get details of the third active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 2);
+  fail_if (activeStream == NULL);
+  assert_equals_int (activeStream->mimeType, GST_STREAM_APPLICATION);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test getting Active stream parameters
+ *
+ */
+GST_START_TEST (dash_mpdparser_activeStream_parameters)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  guint activeStreams;
+  GstActiveStream *activeStream;
+  GstCaps *caps;
+  GstStructure *s;
+  gboolean bitstreamSwitchingFlag;
+  guint videoStreamWidth;
+  guint videoStreamHeight;
+  guint audioStreamRate;
+  guint audioChannelsCount;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\""
+      "          duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\""
+      "                   mimeType=\"video/mp4\""
+      "                   width=\"320\""
+      "                   height=\"240\""
+      "                   bitstreamSwitching=\"true\""
+      "                   audioSamplingRate=\"48000\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* 1 active stream */
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, 1);
+
+  /* get details of the first active stream */
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  assert_equals_int (activeStream->mimeType, GST_STREAM_VIDEO);
+  caps = gst_mpd_client2_get_stream_caps (activeStream);
+  fail_unless (caps != NULL);
+  s = gst_caps_get_structure (caps, 0);
+  assert_equals_string (gst_structure_get_name (s), "video/quicktime");
+  gst_caps_unref (caps);
+
+  bitstreamSwitchingFlag =
+      gst_mpd_client2_get_bitstream_switching_flag (activeStream);
+  assert_equals_int (bitstreamSwitchingFlag, 1);
+
+  videoStreamWidth = gst_mpd_client2_get_video_stream_width (activeStream);
+  assert_equals_int (videoStreamWidth, 320);
+
+  videoStreamHeight = gst_mpd_client2_get_video_stream_height (activeStream);
+  assert_equals_int (videoStreamHeight, 240);
+
+  audioStreamRate = gst_mpd_client2_get_audio_stream_rate (activeStream);
+  assert_equals_int (audioStreamRate, 48000);
+
+  audioChannelsCount =
+      gst_mpd_client2_get_audio_stream_num_channels (activeStream);
+  assert_equals_int (audioChannelsCount, 0);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test getting number and list of audio languages
+ *
+ */
+GST_START_TEST (dash_mpdparser_get_audio_languages)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  guint activeStreams;
+  guint adaptationSetsCount;
+  GList *languages = NULL;
+  guint languagesCount;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation>"
+      "    </AdaptationSet>"
+      "    <AdaptationSet id=\"2\" mimeType=\"video/mp4\">"
+      "      <Representation id=\"2\" bandwidth=\"250000\">"
+      "      </Representation>"
+      "    </AdaptationSet>"
+      "    <AdaptationSet id=\"3\" mimeType=\"audio\" lang=\"fr\">"
+      "      <Representation id=\"3\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+  gint i;
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from all adaptation sets */
+  adaptationSetsCount = gst_mpd_client2_get_nb_adaptationSet (mpdclient);
+  for (i = 0; i < adaptationSetsCount; i++) {
+    adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, i);
+    fail_if (adapt_set == NULL);
+    ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+    assert_equals_int (ret, TRUE);
+  }
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, adaptationSetsCount);
+
+  languagesCount =
+      gst_mpd_client2_get_list_and_nb_of_audio_language (mpdclient, &languages);
+  assert_equals_int (languagesCount, 2);
+  assert_equals_string ((gchar *) g_list_nth_data (languages, 0), "en");
+  assert_equals_string ((gchar *) g_list_nth_data (languages, 1), "fr");
+
+  g_list_free (languages);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Tests getting the base URL
+ *
+ */
+static GstMPDClient2 *
+setup_mpd_client (const gchar * xml)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  guint activeStreams;
+  guint adaptationSetsCount;
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+  gint i;
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from all adaptation sets */
+  adaptationSetsCount = gst_mpd_client2_get_nb_adaptationSet (mpdclient);
+  for (i = 0; i < adaptationSetsCount; i++) {
+    adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, i);
+    fail_if (adapt_set == NULL);
+    ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+    assert_equals_int (ret, TRUE);
+  }
+  activeStreams = gst_mpd_client2_get_nb_active_stream (mpdclient);
+  assert_equals_int (activeStreams, adaptationSetsCount);
+
+  return mpdclient;
+}
+
+GST_START_TEST (dash_mpdparser_get_baseURL1)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>http://example.com/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "http://example.com/");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+
+GST_START_TEST (dash_mpdparser_get_baseURL2)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>mpd_base_url/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <BaseURL> /period_base_url/</BaseURL>"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <BaseURL>adaptation_base_url</BaseURL>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <BaseURL>representation_base_url</BaseURL>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  /* test baseURL. Its value should be computed like this:
+   *  - start with xml url (null)
+   *  - set it to the value from MPD's BaseURL element: "mpd_base_url/"
+   *  - update the value with BaseURL element from Period. Because Period's
+   * baseURL is absolute (starts with /) it will overwrite the current value
+   * for baseURL. So, baseURL becomes "/period_base_url/"
+   *  - update the value with BaseURL element from AdaptationSet. Because this
+   * is a relative url, it will update the current value. baseURL becomes
+   * "/period_base_url/adaptation_base_url"
+   *  - update the value with BaseURL element from Representation. Because this
+   * is a relative url, it will update the current value. Because the current
+   * value does not end in /, everything after the last / will be overwritten.
+   * baseURL becomes "/period_base_url/representation_base_url"
+   */
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "/period_base_url/representation_base_url");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
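+
+/* The resolution steps described above (and in the later baseURL tests)
+ * follow three rules: a BaseURL containing a ':' carries a scheme and
+ * replaces the current base completely, one starting with '/' replaces the
+ * accumulated path, and any other value is resolved against the current base
+ * up to its last '/'. The sketch below restates those rules over plain
+ * strings for illustration only: it ignores whitespace trimming, query and
+ * fragment handling, and is not the resolver the MPD client uses. It assumes
+ * strchr()/strrchr() from <string.h> are available. */
+static G_GNUC_UNUSED gchar *
+example_resolve_base_url (const gchar * current, const gchar * base_url)
+{
+  const gchar *last_slash;
+
+  if (strchr (base_url, ':') != NULL)
+    return g_strdup (base_url); /* has a scheme: replaces the base entirely */
+  if (base_url[0] == '/')
+    return g_strdup (base_url); /* absolute path: replaces the current path */
+  /* relative path: appended after the last '/' of the current base,
+   * made absolute with a leading '/' if there is no base yet */
+  last_slash = strrchr (current, '/');
+  if (last_slash == NULL)
+    return g_strconcat ("/", base_url, NULL);
+  return g_strdup_printf ("%.*s%s", (gint) (last_slash - current + 1),
+      current, base_url);
+}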
+
+
+GST_START_TEST (dash_mpdparser_get_baseURL3)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>mpd_base_url/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <BaseURL> /period_base_url/</BaseURL>"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <BaseURL>adaptation_base_url</BaseURL>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <BaseURL>/representation_base_url</BaseURL>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  /* test baseURL. Its value should be computed like this:
+   *  - start with xml url (null)
+   *  - set it to the value from MPD's BaseURL element: "mpd_base_url/"
+   *  - update the value with BaseURL element from Period. Because Period's
+   * baseURL is absolute (starts with /) it will overwrite the current value
+   * for baseURL. So, baseURL becomes "/period_base_url/"
+   *  - update the value with BaseURL element from AdaptationSet. Because this
+   * is a relative url, it will update the current value. baseURL becomes
+   * "/period_base_url/adaptation_base_url"
+   *  - update the value with BaseURL element from Representation. Because this
+   * is an absolute url, it will replace everything again.
+   */
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "/representation_base_url");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+
+GST_START_TEST (dash_mpdparser_get_baseURL4)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>mpd_base_url/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <BaseURL> /period_base_url/</BaseURL>"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <BaseURL>adaptation_base_url/</BaseURL>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <BaseURL>representation_base_url/</BaseURL>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  /* test baseURL. Its value should be computed like this:
+   *  - start with xml url (null)
+   *  - set it to the value from MPD's BaseURL element: "mpd_base_url/"
+   *  - update the value with BaseURL element from Period. Because Period's
+   * baseURL is absolute (starts with /) it will overwrite the current value
+   * for baseURL. So, baseURL becomes "/period_base_url/"
+   *  - update the value with BaseURL element from AdaptationSet. Because this
+   * is a relative url, it will update the current value. baseURL becomes
+   * "/period_base_url/adaptation_base_url/"
+   *  - update the value with BaseURL element from Representation. Because this
+   * is a relative url, it will update the current value.
+   */
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL,
+      "/period_base_url/adaptation_base_url/representation_base_url/");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/* test multiple BaseURL entries per section */
+GST_START_TEST (dash_mpdparser_get_baseURL5)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  const gchar *baseURL;
+  GstMPDBaseURLNode *gstBaseURL;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>/mpd_base_url1/</BaseURL>"
+      "  <BaseURL>/mpd_base_url2/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <BaseURL> period_base_url1/</BaseURL>"
+      "    <BaseURL> period_base_url2/</BaseURL>"
+      "    <BaseURL> period_base_url3/</BaseURL>"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <BaseURL>adaptation_base_url1/</BaseURL>"
+      "      <BaseURL>adaptation_base_url2/</BaseURL>"
+      "      <BaseURL>adaptation_base_url3/</BaseURL>"
+      "      <BaseURL>adaptation_base_url4/</BaseURL>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <BaseURL>representation_base_url1/</BaseURL>"
+      "        <BaseURL>representation_base_url2/</BaseURL>"
+      "        <BaseURL>representation_base_url3/</BaseURL>"
+      "        <BaseURL>representation_base_url4/</BaseURL>"
+      "        <BaseURL>representation_base_url5/</BaseURL>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  assert_equals_int (g_list_length (mpdclient->mpd_root_node->BaseURLs), 2);
+  gstBaseURL = g_list_nth_data (mpdclient->mpd_root_node->BaseURLs, 0);
+  assert_equals_string (gstBaseURL->baseURL, "/mpd_base_url1/");
+  gstBaseURL = g_list_nth_data (mpdclient->mpd_root_node->BaseURLs, 1);
+  assert_equals_string (gstBaseURL->baseURL, "/mpd_base_url2/");
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  assert_equals_int (g_list_length (periodNode->BaseURLs), 3);
+  gstBaseURL = g_list_nth_data (periodNode->BaseURLs, 0);
+  assert_equals_string (gstBaseURL->baseURL, " period_base_url1/");
+  gstBaseURL = g_list_nth_data (periodNode->BaseURLs, 1);
+  assert_equals_string (gstBaseURL->baseURL, " period_base_url2/");
+  gstBaseURL = g_list_nth_data (periodNode->BaseURLs, 2);
+  assert_equals_string (gstBaseURL->baseURL, " period_base_url3/");
+
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  assert_equals_int (g_list_length (adaptationSet->BaseURLs), 4);
+  gstBaseURL = g_list_nth_data (adaptationSet->BaseURLs, 0);
+  assert_equals_string (gstBaseURL->baseURL, "adaptation_base_url1/");
+  gstBaseURL = g_list_nth_data (adaptationSet->BaseURLs, 1);
+  assert_equals_string (gstBaseURL->baseURL, "adaptation_base_url2/");
+  gstBaseURL = g_list_nth_data (adaptationSet->BaseURLs, 2);
+  assert_equals_string (gstBaseURL->baseURL, "adaptation_base_url3/");
+  gstBaseURL = g_list_nth_data (adaptationSet->BaseURLs, 3);
+  assert_equals_string (gstBaseURL->baseURL, "adaptation_base_url4/");
+
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  assert_equals_int (g_list_length (representation->BaseURLs), 5);
+  gstBaseURL = g_list_nth_data (representation->BaseURLs, 0);
+  assert_equals_string (gstBaseURL->baseURL, "representation_base_url1/");
+  gstBaseURL = g_list_nth_data (representation->BaseURLs, 1);
+  assert_equals_string (gstBaseURL->baseURL, "representation_base_url2/");
+  gstBaseURL = g_list_nth_data (representation->BaseURLs, 2);
+  assert_equals_string (gstBaseURL->baseURL, "representation_base_url3/");
+  gstBaseURL = g_list_nth_data (representation->BaseURLs, 3);
+  assert_equals_string (gstBaseURL->baseURL, "representation_base_url4/");
+  gstBaseURL = g_list_nth_data (representation->BaseURLs, 4);
+  assert_equals_string (gstBaseURL->baseURL, "representation_base_url5/");
+
+  /* test baseURL. Its value should be computed like this:
+   *  - start with xml url (null)
+   *  - set it to the value from MPD's BaseURL element: "/mpd_base_url1/"
+   *  - update the value with BaseURL element from Period. Because this
+   * is a relative url, it will update the current value. baseURL becomes
+   * "/mpd_base_url1/period_base_url1/"
+   *  - update the value with BaseURL element from AdaptationSet. Because this
+   * is a relative url, it will update the current value. baseURL becomes
+   * "/mpd_base_url1/period_base_url1/adaptation_base_url1/"
+   *  - update the value with BaseURL element from Representation. Because this
+   * is a relative url, it will update the current value.
+   */
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL,
+      "/mpd_base_url1/period_base_url1/adaptation_base_url1/representation_base_url1/");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/* test no BaseURL */
+GST_START_TEST (dash_mpdparser_get_baseURL6)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/* BaseURL: test that the path is made absolute (a / is prepended if needed) */
+GST_START_TEST (dash_mpdparser_get_baseURL7)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>x/example.com/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient;
+
+  mpdclient = setup_mpd_client (xml);
+
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "/x/example.com/");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/* BaseURL: test that a / is not prepended if the string contains ':'.
+ * This tests URIs with a scheme present */
+GST_START_TEST (dash_mpdparser_get_baseURL8)
+{
+  const gchar *baseURL;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <BaseURL>x:y/example.com/</BaseURL>"
+      "  <Period id=\"Period0\" duration=\"P0Y0M1DT1H1M1S\">"
+      "    <AdaptationSet id=\"1\" mimeType=\"audio\" lang=\"en\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  GstMPDClient2 *mpdclient = setup_mpd_client (xml);
+
+  baseURL = gst_mpd_client2_get_baseURL (mpdclient, 0);
+  fail_if (baseURL == NULL);
+  assert_equals_string (baseURL, "x:y/example.com/");
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
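+/* Illustrative sketch only (the helper name is hypothetical, this is not the
+ * code used by the parser): the BaseURL tests above exercise the rule that a
+ * value without a scheme is made absolute by prepending '/', while a value
+ * containing ':' (scheme present) is left untouched.
+ */
+static G_GNUC_UNUSED gchar *
+sketch_make_base_url_absolute (const gchar * base_url)
+{
+  /* no BaseURL at all -> stays empty (see the "no BaseURL" test above) */
+  if (*base_url == '\0')
+    return g_strdup (base_url);
+  /* "x:y/example.com/" contains ':' (scheme present) -> keep as-is */
+  if (strchr (base_url, ':') != NULL)
+    return g_strdup (base_url);
+  /* "x/example.com/" has no scheme -> prepend '/' unless already absolute */
+  if (base_url[0] != '/')
+    return g_strconcat ("/", base_url, NULL);
+  return g_strdup (base_url);
+}
+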
+/*
+ * Test getting mediaPresentationDuration
+ *
+ */
+GST_START_TEST (dash_mpdparser_get_mediaPresentationDuration)
+{
+  GstClockTime mediaPresentationDuration;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     mediaPresentationDuration=\"P0Y0M0DT0H0M3S\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
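+  /* Note: gst_mpd_client2_get_media_presentation_duration () returns a
+   * GstClockTime in nanoseconds, so the 3-second duration "P0Y0M0DT0H0M3S"
+   * above is expected to be 3 * GST_SECOND = 3000000000 ns.
+   */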
+  mediaPresentationDuration =
+      gst_mpd_client2_get_media_presentation_duration (mpdclient);
+  assert_equals_uint64 (mediaPresentationDuration, 3000000000);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test getting streamPresentationOffset
+ *
+ */
+GST_START_TEST (dash_mpdparser_get_streamPresentationOffset)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstClockTime offset;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period>"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <SegmentBase timescale=\"1000\" presentationTimeOffset=\"3000\">"
+      "      </SegmentBase>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* test the stream presentation time offset */
+  offset = gst_mpd_client2_get_stream_presentation_offset (mpdclient, 0);
+  /* the offset appears to be applied only for SegmentTemplate-based segments
+   * (see the segment template test below), so here it is 0 */
+  assert_equals_int (offset, 0);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling segments
+ *
+ */
+GST_START_TEST (dash_mpdparser_segments)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  gboolean hasNextSegment;
+  GstActiveStream *activeStream;
+  GstFlowReturn flow;
+  GstDateTime *segmentAvailability;
+  GstDateTime *gst_time;
+  GDateTime *g_time;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     type=\"dynamic\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\" start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList duration=\"45\">"
+      "          <SegmentURL media=\"TestMedia1\""
+      "                      mediaRange=\"10-20\""
+      "                      index=\"TestIndex1\""
+      "                      indexRange=\"30-40\">"
+      "          </SegmentURL>"
+      "          <SegmentURL media=\"TestMedia2\""
+      "                      mediaRange=\"20-30\""
+      "                      index=\"TestIndex2\""
+      "                      indexRange=\"40-50\">"
+      "          </SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  /* segment_index 0, segment_count 2.
+   * Has next segment and can advance to next segment
+   */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 1);
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_OK);
+
+  /* segment_index 1, segment_count 2.
+   * Does not have next segment and can not advance to next segment
+   */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 0);
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_EOS);
+
+  /* go to first segment */
+  gst_mpd_client2_seek_to_first_segment (mpdclient);
+
+  /* segment_index 0, segment_count 2.
+   * Has next segment and can advance to next segment
+   */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 1);
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_OK);
+
+  /* segment_index 1, segment_count 2
+   * Does not have next segment
+   */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 0);
+
+  /* segment index is still 1 */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 0);
+
+  /* each segment has a duration of 45 seconds and the segment index is 1.
+   * The segment start time is 1 * segment_duration = 45s.
+   * The availability start time is at the end of the segment, so we add the
+   * segment duration (45s) and the period start time (10s).
+   * So, the availability start time for segment 1 is:
+   * 10 (period start) + 45 (segment start) + 45 (duration) = 100s = 0:01:40
+   */
+  segmentAvailability =
+      gst_mpd_client2_get_next_segment_availability_start_time (mpdclient,
+      activeStream);
+  assert_equals_int (gst_date_time_get_year (segmentAvailability), 2015);
+  assert_equals_int (gst_date_time_get_month (segmentAvailability), 3);
+  assert_equals_int (gst_date_time_get_day (segmentAvailability), 24);
+  assert_equals_int (gst_date_time_get_hour (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_minute (segmentAvailability), 1);
+  assert_equals_int (gst_date_time_get_second (segmentAvailability), 40);
+  gst_date_time_unref (segmentAvailability);
+
+  /* seek to time */
+  gst_time = gst_date_time_new_from_iso8601_string ("2015-03-24T0:0:20Z");
+  g_time = gst_date_time_to_g_date_time (gst_time);
+  ret = gst_mpd_client2_seek_to_time (mpdclient, g_time);
+  assert_equals_int (ret, 1);
+  gst_date_time_unref (gst_time);
+  g_date_time_unref (g_time);
+
+  /* segment index is now 0 */
+  hasNextSegment =
+      gst_mpd_client2_has_next_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (hasNextSegment, 1);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling headers
+ *
+ */
+GST_START_TEST (dash_mpdparser_headers)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  gchar *uri;
+  gint64 range_start;
+  gint64 range_end;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     type=\"dynamic\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentBase indexRange=\"10-20\">"
+      "          <Initialization sourceURL=\"TestSourceUrl\""
+      "                          range=\"100-200\">"
+      "          </Initialization>"
+      "          <RepresentationIndex sourceURL=\"TestSourceIndex\">"
+      "          </RepresentationIndex>"
+      "        </SegmentBase>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  /* get segment url and range from segment Initialization */
+  ret =
+      gst_mpd_client2_get_next_header (mpdclient, &uri, 0, &range_start,
+      &range_end);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (uri, "TestSourceUrl");
+  assert_equals_int64 (range_start, 100);
+  assert_equals_int64 (range_end, 200);
+  g_free (uri);
+
+  /* get segment url and range from segment indexRange */
+  ret =
+      gst_mpd_client2_get_next_header_index (mpdclient, &uri, 0, &range_start,
+      &range_end);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (uri, "TestSourceIndex");
+  assert_equals_int64 (range_start, 10);
+  assert_equals_int64 (range_end, 20);
+  g_free (uri);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling fragments
+ *
+ */
+GST_START_TEST (dash_mpdparser_fragments)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstMediaFragmentInfo fragment;
+  GstActiveStream *activeStream;
+  GstClockTime nextFragmentDuration;
+  GstClockTime nextFragmentTimestamp;
+  GstClockTime nextFragmentTimestampEnd;
+  GstClockTime periodStartTime;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstClockTime expectedTimestampEnd;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\" start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  /* expected duration of the next fragment */
+  expectedDuration = duration_to_ms (0, 0, 0, 3, 3, 20, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 0, 0);
+  expectedTimestampEnd = duration_to_ms (0, 0, 0, 3, 3, 20, 0);
+
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "");
+  assert_equals_int64 (fragment.range_start, 0);
+  assert_equals_int64 (fragment.range_end, -1);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  periodStartTime = gst_mpd_client2_get_period_start_time (mpdclient);
+  assert_equals_uint64 (periodStartTime, 10 * GST_SECOND);
+
+  nextFragmentDuration =
+      gst_mpd_client2_get_next_fragment_duration (mpdclient, activeStream);
+  assert_equals_uint64 (nextFragmentDuration, expectedDuration * GST_MSECOND);
+
+  ret =
+      gst_mpd_client2_get_next_fragment_timestamp (mpdclient, 0,
+      &nextFragmentTimestamp);
+  assert_equals_int (ret, TRUE);
+  assert_equals_uint64 (nextFragmentTimestamp, expectedTimestamp * GST_MSECOND);
+
+  ret =
+      gst_mpd_client2_get_last_fragment_timestamp_end (mpdclient, 0,
+      &nextFragmentTimestampEnd);
+  assert_equals_int (ret, TRUE);
+  assert_equals_uint64 (nextFragmentTimestampEnd,
+      expectedTimestampEnd * GST_MSECOND);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test inheriting segmentBase from parent
+ *
+ */
+GST_START_TEST (dash_mpdparser_inherited_segmentBase)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\">"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <SegmentBase timescale=\"100\">"
+      "      </SegmentBase>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentBase timescale=\"200\">"
+      "        </SegmentBase>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+
+  /* test segment base from adaptation set */
+  segmentBase = adaptationSet->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 100);
+
+  /* test segment base from representation */
+  segmentBase = representation->SegmentBase;
+  assert_equals_uint64 (segmentBase->timescale, 200);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test inheriting segmentURL from parent
+ *
+ */
+GST_START_TEST (dash_mpdparser_inherited_segmentURL)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstFlowReturn flow;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period>"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <SegmentList duration=\"100\">"
+      "        <SegmentURL media=\"TestMediaAdaptation\""
+      "                    mediaRange=\"10-20\""
+      "                    index=\"TestIndexAdaptation\""
+      "                    indexRange=\"30-40\">"
+      "        </SegmentURL>"
+      "      </SegmentList>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList duration=\"110\">"
+      "          <SegmentURL media=\"TestMediaRep\""
+      "                      mediaRange=\"100-200\""
+      "                      index=\"TestIndexRep\""
+      "                      indexRange=\"300-400\">"
+      "          </SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  /* expected duration of the next fragment
+   * Segment duration was set to 100 in AdaptationSet and to 110 in Representation
+   * We expect duration to be 110
+   */
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 110, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 0, 0);
+
+  /* the representation contains 1 segment (the one from Representation) */
+
+  /* check first segment */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMediaRep");
+  assert_equals_int64 (fragment.range_start, 100);
+  assert_equals_int64 (fragment.range_end, 200);
+  assert_equals_string (fragment.index_uri, "/TestIndexRep");
+  assert_equals_int64 (fragment.index_range_start, 300);
+  assert_equals_int64 (fragment.index_range_end, 400);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* try to advance to next segment. Should fail */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_EOS);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test segment list
+ *
+ */
+GST_START_TEST (dash_mpdparser_segment_list)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList duration=\"12000\">"
+      "          <SegmentURL media=\"TestMedia\""
+      "                      mediaRange=\"100-200\""
+      "                      index=\"TestIndex\""
+      "                      indexRange=\"300-400\">"
+      "          </SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  /* expected duration of the next fragment
+   * Segment duration was set larger than period duration (12000 vs 11000).
+   * We expect it to be limited to period duration.
+   */
+  expectedDuration = duration_to_ms (0, 0, 0, 3, 3, 20, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 10, 0);
+
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia");
+  assert_equals_int64 (fragment.range_start, 100);
+  assert_equals_int64 (fragment.range_end, 200);
+  assert_equals_string (fragment.index_uri, "/TestIndex");
+  assert_equals_int64 (fragment.index_range_start, 300);
+  assert_equals_int64 (fragment.index_range_end, 400);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test segment template
+ *
+ */
+GST_START_TEST (dash_mpdparser_segment_template)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstClockTime periodStartTime;
+  GstClockTime offset;
+  GstClockTime lastFragmentTimestampEnd;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"repId\" bandwidth=\"250000\">"
+      "        <SegmentTemplate duration=\"12000\""
+      "                         presentationTimeOffset=\"15\""
+      "                         media=\"TestMedia_rep=$RepresentationID$number=$Number$bandwidth=$Bandwidth$time=$Time$\""
+      "                         index=\"TestIndex\">"
+      "        </SegmentTemplate>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  /* expected duration of the next fragment
+   * Segment duration was set larger than period duration (12000 vs 11000).
+   * We expect it not to be limited to the period duration.
+   */
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 12000, 0);
+
+  /* While the period starts at 10s, the fragment timestamp is supposed to be
+   * 0. Timestamps start from 0 in every period; only the overall composition
+   * of periods takes the period start into account. In dashdemux this is done
+   * by mapping the 0 fragment timestamp to a stream time equal to the period
+   * start time.
+   */
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 0, 0);
+
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri,
+      "/TestMedia_rep=repIdnumber=1bandwidth=250000time=0");
+  assert_equals_int64 (fragment.range_start, 0);
+  assert_equals_int64 (fragment.range_end, -1);
+  assert_equals_string (fragment.index_uri, "/TestIndex");
+  assert_equals_int64 (fragment.index_range_start, 0);
+  assert_equals_int64 (fragment.index_range_end, -1);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+
+  periodStartTime = gst_mpd_client2_get_period_start_time (mpdclient);
+  assert_equals_uint64 (periodStartTime, 10 * GST_SECOND);
+
+  offset = gst_mpd_client2_get_stream_presentation_offset (mpdclient, 0);
+  assert_equals_uint64 (offset, 15 * GST_SECOND);
+
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /*
+   * Period starts at 10s.
+   * MPD has a duration of 3h3m30s, so period duration is 3h3m20s.
+   * We expect the last fragment to end at period start + period duration: 3h3m30s
+   */
+  expectedTimestamp = duration_to_ms (0, 0, 0, 3, 3, 30, 0);
+  ret = gst_mpd_client2_get_last_fragment_timestamp_end (mpdclient, 0,
+      &lastFragmentTimestampEnd);
+  assert_equals_int (ret, TRUE);
+  assert_equals_uint64 (lastFragmentTimestampEnd,
+      expectedTimestamp * GST_MSECOND);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
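+/* Illustrative sketch only (the helper name is hypothetical, this is not the
+ * parser implementation): the SegmentTemplate test above expects the media
+ * template
+ * "TestMedia_rep=$RepresentationID$number=$Number$bandwidth=$Bandwidth$time=$Time$"
+ * to expand, for the first segment of Representation "repId", to
+ * "TestMedia_rep=repIdnumber=1bandwidth=250000time=0" (the leading "/" in the
+ * assertion comes from making the path absolute, as in the baseURL tests
+ * above). With the identifiers substituted directly and printf-style format
+ * tags such as $Number%05d$ ignored, the expansion is simply:
+ */
+static G_GNUC_UNUSED gchar *
+sketch_expand_test_template (const gchar * rep_id, guint number,
+    guint bandwidth, guint64 time)
+{
+  /* $RepresentationID$ -> rep_id, $Number$ -> number (1-based),
+   * $Bandwidth$ -> bandwidth, $Time$ -> time */
+  return g_strdup_printf ("TestMedia_rep=%snumber=%ubandwidth=%utime=%"
+      G_GUINT64_FORMAT, rep_id, number, bandwidth, time);
+}
+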
+/*
+ * Test segment timeline
+ *
+ */
+GST_START_TEST (dash_mpdparser_segment_timeline)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstFlowReturn flow;
+  GstDateTime *segmentAvailability;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <SegmentList>"
+      "        <SegmentTimeline>"
+      "          <S t=\"10\"  d=\"20\" r=\"30\"></S>"
+      "        </SegmentTimeline>"
+      "      </SegmentList>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList>"
+      "          <SegmentTimeline>"
+      "            <S t=\"3\"  d=\"2\" r=\"1\"></S>"
+      "            <S t=\"10\" d=\"3\" r=\"0\"></S>"
+      "          </SegmentTimeline>"
+      "          <SegmentURL media=\"TestMedia0\""
+      "                      index=\"TestIndex0\">"
+      "          </SegmentURL>"
+      "          <SegmentURL media=\"TestMedia1\""
+      "                      index=\"TestIndex1\">"
+      "          </SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
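+  /* For reference, the Representation's SegmentTimeline above expands to
+   * three segments (r="1" means the <S> entry is repeated one extra time):
+   *
+   *   <S t="3"  d="2" r="1">  ->  segment 0: start  3s, duration 2s
+   *                                segment 1: start  5s, duration 2s
+   *   <S t="10" d="3" r="0">  ->  segment 2: start 10s, duration 3s
+   *
+   * With the 10s period start added, these are the timestamps (13s, 15s,
+   * 20s) and durations asserted below.
+   */
+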
+  /* expected duration of the next fragment */
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 2, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 13, 0);
+
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia0");
+  assert_equals_string (fragment.index_uri, "/TestIndex0");
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* first segment starts at 3s and has a duration of 2s.
+   * We also add period start time (10s) so we expect a segment availability
+   * start time of 15s
+   */
+  segmentAvailability =
+      gst_mpd_client2_get_next_segment_availability_start_time (mpdclient,
+      activeStream);
+  fail_unless (segmentAvailability != NULL);
+  assert_equals_int (gst_date_time_get_year (segmentAvailability), 2015);
+  assert_equals_int (gst_date_time_get_month (segmentAvailability), 3);
+  assert_equals_int (gst_date_time_get_day (segmentAvailability), 24);
+  assert_equals_int (gst_date_time_get_hour (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_minute (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_second (segmentAvailability), 15);
+  gst_date_time_unref (segmentAvailability);
+
+  /* advance to next segment */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_OK);
+
+  /* second segment starts after first ends */
+  expectedTimestamp = expectedTimestamp + expectedDuration;
+
+  /* check second segment.
+   * It is a repeat of first segmentURL, because "r" in SegmentTimeline is 1
+   */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia0");
+  assert_equals_string (fragment.index_uri, "/TestIndex0");
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* first segment starts at 3s and has a duration of 2s.
+   * Second segment starts when the first ends (5s) and has a duration of 2s,
+   * so it ends at 7s.
+   * We also add period start time (10s) so we expect a segment availability
+   * start time of 17s
+   */
+  segmentAvailability =
+      gst_mpd_client2_get_next_segment_availability_start_time (mpdclient,
+      activeStream);
+  fail_unless (segmentAvailability != NULL);
+  assert_equals_int (gst_date_time_get_year (segmentAvailability), 2015);
+  assert_equals_int (gst_date_time_get_month (segmentAvailability), 3);
+  assert_equals_int (gst_date_time_get_day (segmentAvailability), 24);
+  assert_equals_int (gst_date_time_get_hour (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_minute (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_second (segmentAvailability), 17);
+  gst_date_time_unref (segmentAvailability);
+
+  /* advance to next segment */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_OK);
+
+  /* the third segment (t=10) starts after a small gap, since the second
+   * one ends at t=7 */
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 3, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 20, 0);
+
+  /* check third segment */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia1");
+  assert_equals_string (fragment.index_uri, "/TestIndex1");
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* Third segment starts at 10s and has a duration of 3s so it ends at 13s.
+   * We also add period start time (10s) so we expect a segment availability
+   * start time of 23s
+   */
+  segmentAvailability =
+      gst_mpd_client2_get_next_segment_availability_start_time (mpdclient,
+      activeStream);
+  fail_unless (segmentAvailability != NULL);
+  assert_equals_int (gst_date_time_get_year (segmentAvailability), 2015);
+  assert_equals_int (gst_date_time_get_month (segmentAvailability), 3);
+  assert_equals_int (gst_date_time_get_day (segmentAvailability), 24);
+  assert_equals_int (gst_date_time_get_hour (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_minute (segmentAvailability), 0);
+  assert_equals_int (gst_date_time_get_second (segmentAvailability), 23);
+  gst_date_time_unref (segmentAvailability);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test SegmentList with multiple inherited segmentURLs
+ *
+ */
+GST_START_TEST (dash_mpdparser_multiple_inherited_segmentURL)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstFlowReturn flow;
+
+  /*
+   * Media presentation duration is 30 seconds.
+   * The Period has no start attribute, so it starts at 0 and its duration is
+   * the full 30 seconds.
+   *
+   * There are 2 segments in the AdaptationSet segment list and 2 in the
+   * Representation's segment list.
+   * Segment duration is 5s for the AdaptationSet segments and 8s for the
+   * Representation segments.
+   * Separately, each segment list (duration 2*5=10 or 2*8=16) fits comfortably
+   * in the Period's 30s duration.
+   *
+   * We expect the Representation segments to overwrite the AdaptationSet segments.
+   */
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      " profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      " availabilityStartTime=\"2015-03-24T0:0:0\""
+      " mediaPresentationDuration=\"P0Y0M0DT0H0M30S\">"
+      "<Period>"
+      "  <AdaptationSet mimeType=\"video/mp4\">"
+      "    <SegmentList duration=\"5\">"
+      "      <SegmentURL"
+      "         media=\"TestMedia0\" mediaRange=\"10-20\""
+      "         index=\"TestIndex0\" indexRange=\"100-200\""
+      "      ></SegmentURL>"
+      "      <SegmentURL"
+      "         media=\"TestMedia1\" mediaRange=\"20-30\""
+      "         index=\"TestIndex1\" indexRange=\"200-300\""
+      "      ></SegmentURL>"
+      "    </SegmentList>"
+      "    <Representation id=\"1\" bandwidth=\"250000\">"
+      "      <SegmentList duration=\"8\">"
+      "        <SegmentURL"
+      "           media=\"TestMedia2\" mediaRange=\"30-40\""
+      "           index=\"TestIndex2\" indexRange=\"300-400\""
+      "        ></SegmentURL>"
+      "        <SegmentURL"
+      "           media=\"TestMedia3\" mediaRange=\"40-50\""
+      "           index=\"TestIndex3\" indexRange=\"400-500\""
+      "        ></SegmentURL>"
+      "      </SegmentList>"
+      "    </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 8, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 0, 0);
+
+  /* the representation contains the 2 segments defined in the Representation.
+   * Both will have the duration specified in the Representation (8s).
+   */
+
+  /* check first segment */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia2");
+  assert_equals_int64 (fragment.range_start, 30);
+  assert_equals_int64 (fragment.range_end, 40);
+  assert_equals_string (fragment.index_uri, "/TestIndex2");
+  assert_equals_int64 (fragment.index_range_start, 300);
+  assert_equals_int64 (fragment.index_range_end, 400);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* advance to next segment */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_OK);
+
+  /* second segment starts after previous ends */
+  expectedTimestamp = expectedTimestamp + expectedDuration;
+
+  /* check second segment */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia3");
+  assert_equals_int64 (fragment.range_start, 40);
+  assert_equals_int64 (fragment.range_end, 50);
+  assert_equals_string (fragment.index_uri, "/TestIndex3");
+  assert_equals_int64 (fragment.index_range_start, 400);
+  assert_equals_int64 (fragment.index_range_end, 500);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* try to advance to the next segment. There isn't any, so it should fail */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_EOS);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test SegmentList with multiple segmentURL
+ *
+ */
+GST_START_TEST (dash_mpdparser_multipleSegmentURL)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+  GstActiveStream *activeStream;
+  GstMediaFragmentInfo fragment;
+  GstClockTime expectedDuration;
+  GstClockTime expectedTimestamp;
+  GstFlowReturn flow;
+
+  /*
+   * Media presentation duration is 30 seconds and the period starts at
+   * 10 seconds, so the period duration is 20 seconds.
+   *
+   * Segment duration is 25 seconds. There are 2 segments in the list.
+   * We expect the first segment to have a duration of 20 seconds (limited by
+   * the period) and the second segment not to exist at all.
+   */
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      " profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      " availabilityStartTime=\"2015-03-24T0:0:0\""
+      " mediaPresentationDuration=\"P0Y0M0DT0H0M30S\">"
+      "<Period start=\"P0Y0M0DT0H0M10S\">"
+      "  <AdaptationSet mimeType=\"video/mp4\">"
+      "    <Representation id=\"1\" bandwidth=\"250000\">"
+      "      <SegmentList duration=\"25\">"
+      "        <SegmentURL"
+      "           media=\"TestMedia0\" mediaRange=\"10-20\""
+      "           index=\"TestIndex0\" indexRange=\"100-200\""
+      "        ></SegmentURL>"
+      "        <SegmentURL"
+      "           media=\"TestMedia1\" mediaRange=\"20-30\""
+      "           index=\"TestIndex1\" indexRange=\"200-300\""
+      "        ></SegmentURL>"
+      "      </SegmentList>"
+      "    </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  /* setup streaming from the first adaptation set */
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, TRUE);
+
+  activeStream = gst_mpd_client2_get_active_stream_by_index (mpdclient, 0);
+  fail_if (activeStream == NULL);
+
+  expectedDuration = duration_to_ms (0, 0, 0, 0, 0, 20, 0);
+  expectedTimestamp = duration_to_ms (0, 0, 0, 0, 0, 10, 0);
+
+  /* the representation contains 2 segments. The first is partially
+   * clipped, and the second entirely (and thus discarded).
+   */
+
+  /* check first segment */
+  ret = gst_mpd_client2_get_next_fragment (mpdclient, 0, &fragment);
+  assert_equals_int (ret, TRUE);
+  assert_equals_string (fragment.uri, "/TestMedia0");
+  assert_equals_int64 (fragment.range_start, 10);
+  assert_equals_int64 (fragment.range_end, 20);
+  assert_equals_string (fragment.index_uri, "/TestIndex0");
+  assert_equals_int64 (fragment.index_range_start, 100);
+  assert_equals_int64 (fragment.index_range_end, 200);
+  assert_equals_uint64 (fragment.duration, expectedDuration * GST_MSECOND);
+  assert_equals_uint64 (fragment.timestamp, expectedTimestamp * GST_MSECOND);
+  gst_mpdparser_media_fragment_info_clear (&fragment);
+
+  /* advance to next segment */
+  flow = gst_mpd_client2_advance_segment (mpdclient, activeStream, TRUE);
+  assert_equals_int (flow, GST_FLOW_EOS);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing empty xml string
+ *
+ */
+GST_START_TEST (dash_mpdparser_missing_xml)
+{
+  const gchar *xml = "";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing an xml with no mpd tag
+ *
+ */
+GST_START_TEST (dash_mpdparser_missing_mpd)
+{
+  const gchar *xml = "<?xml version=\"1.0\"?>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing an MPD with a wrong end tag
+ */
+GST_START_TEST (dash_mpdparser_no_end_tag)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\"> </NPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing an MPD with no default namespace
+ */
+GST_START_TEST (dash_mpdparser_no_default_namespace)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD profiles=\"urn:mpeg:dash:profile:isoff-main:2011\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling wrong period duration during attempts to
+ * infer a period duration from the start time of the next period
+ */
+GST_START_TEST (dash_mpdparser_wrong_period_duration_inferred_from_next_period)
+{
+  const gchar *periodName;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\" duration=\"P0Y0M0DT1H1M0S\"></Period>"
+      "  <Period id=\"Period1\"></Period>"
+      "  <Period id=\"Period2\" start=\"P0Y0M0DT0H0M10S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* period_idx should be 0 and we should have no active periods */
+  assert_equals_uint64 (mpdclient->period_idx, 0);
+  fail_unless (mpdclient->periods == NULL);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* Period0 should be present */
+  fail_unless (mpdclient->periods != NULL);
+  periodName = gst_mpd_client2_get_period_id (mpdclient);
+  assert_equals_string (periodName, "Period0");
+
+  /* Period1 should not be present due to wrong duration */
+  ret = gst_mpd_client2_set_period_index (mpdclient, 1);
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test handling wrong period duration during attempts to
+ * infer a period duration from the mediaPresentationDuration
+ */
+GST_START_TEST
+    (dash_mpdparser_wrong_period_duration_inferred_from_next_mediaPresentationDuration)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\" start=\"P0Y0M0DT4H0M0S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* period_idx should be 0 and we should have no active periods */
+  assert_equals_uint64 (mpdclient->period_idx, 0);
+  fail_unless (mpdclient->periods == NULL);
+
+  /* process the xml data
+   * should fail due to wrong duration in Period0 (start > mediaPresentationDuration)
+   */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (dash_mpdparser_whitespace_strings)
+{
+  fail_unless (_mpd_helper_validate_no_whitespace ("") == TRUE);
+  fail_unless (_mpd_helper_validate_no_whitespace ("/") == TRUE);
+  fail_unless (_mpd_helper_validate_no_whitespace (" ") == FALSE);
+  fail_unless (_mpd_helper_validate_no_whitespace ("aaaaaaaa ") == FALSE);
+  fail_unless (_mpd_helper_validate_no_whitespace ("a\ta") == FALSE);
+  fail_unless (_mpd_helper_validate_no_whitespace ("a\ra") == FALSE);
+  fail_unless (_mpd_helper_validate_no_whitespace ("a\na") == FALSE);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (dash_mpdparser_rfc1738_strings)
+{
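+  /* The checks below exercise the RFC 1738 rules for URL path characters:
+   * alphanumerics plus ";:@&=$-_.+!*'()," and "/" are accepted, a '%' must
+   * be followed by exactly two hex digits ("%AA" is valid; "%A", "%XX" and a
+   * bare "%" are not), and spaces and control characters are rejected.
+   */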
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("/") == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url (" ") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("aaaaaaaa ") == FALSE);
+
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("") == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("a") == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url
+      (";:@&=aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ0123456789$-_.+!*'(),%AA")
+      == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url
+      (";:@&=aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ0123456789$-_.+!*'(),/%AA")
+      == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url
+      (";:@&=aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ0123456789$-_.+!*'(),% ")
+      == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%AA") == TRUE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%A") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%XA") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%AX") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("%XX") == FALSE);
+  fail_unless (gst_mpdparser_validate_rfc1738_url ("\001") == FALSE);
+}
+
+GST_END_TEST;
+
+/*
+ * Test negative period duration
+ */
+GST_START_TEST (dash_mpdparser_negative_period_duration)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\""
+      "          start=\"P0Y0M0DT1H0M0S\""
+      "          duration=\"-PT10S\">"
+      "  </Period><Period id=\"Period1\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data
+   * should fail due to negative duration of Period0
+   */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing negative values from attributes that should be unsigned
+ *
+ */
+GST_START_TEST (dash_mpdparser_read_unsigned_from_negative_values)
+{
+  GstMPDPeriodNode *periodNode;
+  GstMPDSegmentBaseNode *segmentBase;
+  GstMPDAdaptationSetNode *adaptationSet;
+  GstMPDRepresentationNode *representation;
+  GstMPDSubRepresentationNode *subRepresentation;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015--1-13T12:25:37\">"
+      "  <Period start=\"-P-2015Y\" duration=\"-P-5M\">"
+      "    <SegmentBase presentationTimeOffset=\"-10\""
+      "                 timescale=\"-5\""
+      "                 indexRange=\"1--10\">"
+      "    </SegmentBase>"
+      "    <AdaptationSet par=\"-1:7\""
+      "                   minFrameRate=\" -1\""
+      "                   segmentAlignment=\"-4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SubRepresentation dependencyLevel=\"1 -2 3\">"
+      "        </SubRepresentation>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  periodNode = (GstMPDPeriodNode *) mpdclient->mpd_root_node->Periods->data;
+  segmentBase = periodNode->SegmentBase;
+  adaptationSet = (GstMPDAdaptationSetNode *) periodNode->AdaptationSets->data;
+  representation = (GstMPDRepresentationNode *)
+      adaptationSet->Representations->data;
+  subRepresentation = (GstMPDSubRepresentationNode *)
+      representation->SubRepresentations->data;
+
+  /* availabilityStartTime parsing should fail */
+  fail_if (mpdclient->mpd_root_node->availabilityStartTime != NULL);
+
+  /* Period start parsing should fail */
+  assert_equals_int64 (periodNode->start, -1);
+
+  /* Period duration parsing should fail */
+  assert_equals_int64 (periodNode->duration, -1);
+
+  /* expect negative value to be rejected and presentationTimeOffset to be 0 */
+  assert_equals_uint64 (segmentBase->presentationTimeOffset, 0);
+  assert_equals_uint64 (segmentBase->timescale, 1);
+  fail_if (segmentBase->indexRange != NULL);
+
+  /* par ratio parsing should fail */
+  fail_if (adaptationSet->par != NULL);
+
+  /* minFrameRate parsing should fail */
+  fail_if (GST_MPD_REPRESENTATION_BASE_NODE (adaptationSet)->minFrameRate !=
+      NULL);
+
+  /* segmentAlignment parsing should fail */
+  fail_if (adaptationSet->segmentAlignment != NULL);
+
+  /* dependency level parsing should fail */
+  fail_if (subRepresentation->dependencyLevel != NULL);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test negative mediaPresentationDuration
+ */
+GST_START_TEST (dash_mpdparser_negative_mediaPresentationDuration)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"-P0Y0M0DT3H3M30S\">"
+      "  <Period id=\"Period0\" start=\"P0Y0M0DT1H0M0S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data
+   * should fail due to negative duration of mediaPresentationDuration
+   */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing an MPD with no profiles
+ */
+GST_START_TEST (dash_mpdparser_no_profiles)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, strlen (xml));
+
+  assert_equals_int (ret, TRUE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test an S node list longer than the SegmentURL list
+ *
+ */
+GST_START_TEST (dash_mpdparser_unmatched_segmentTimeline_segmentURL)
+{
+  GList *adaptationSets;
+  GstMPDAdaptationSetNode *adapt_set;
+
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     mediaPresentationDuration=\"P0Y0M0DT3H3M30S\">"
+      "  <Period start=\"P0Y0M0DT0H0M10S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\">"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentList>"
+      "          <SegmentTimeline>"
+      "            <S t=\"3\"  d=\"2\" r=\"1\"></S>"
+      "            <S t=\"10\" d=\"3\" r=\"0\"></S>"
+      "          </SegmentTimeline>"
+      "          <SegmentURL media=\"TestMedia0\""
+      "                      index=\"TestIndex0\">"
+      "          </SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  /* process the xml data */
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+
+  /* get the list of adaptation sets of the first period */
+  adaptationSets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  fail_if (adaptationSets == NULL);
+
+  adapt_set = (GstMPDAdaptationSetNode *) g_list_nth_data (adaptationSets, 0);
+  fail_if (adapt_set == NULL);
+
+  /* setup streaming from the first adaptation set.
+   * Should fail because the second S node does not have a matching
+   * SegmentURL node
+   */
+  ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set);
+  assert_equals_int (ret, FALSE);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing of the default presentation delay property
+ */
+GST_START_TEST (dash_mpdparser_default_presentation_delay)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     maxSegmentDuration=\"PT2S\">"
+      "  <Period id=\"Period0\" start=\"P0S\"></Period></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+  gint64 value;
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "5s");
+  assert_equals_int64 (value, 5000);
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "5S");
+  assert_equals_int64 (value, 5000);
+  value =
+      gst_mpd_client2_parse_default_presentation_delay (mpdclient, "5 seconds");
+  assert_equals_int64 (value, 5000);
+  value =
+      gst_mpd_client2_parse_default_presentation_delay (mpdclient, "2500ms");
+  assert_equals_int64 (value, 2500);
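+  /* Note: the "Nf"/"NF" values below appear to be interpreted as a number of
+   * fragments of maxSegmentDuration (PT2S in this MPD), which would explain
+   * the expected 3 * 2000 ms = 6000 ms. */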
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "3f");
+  assert_equals_int64 (value, 6000);
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "3F");
+  assert_equals_int64 (value, 6000);
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "");
+  assert_equals_int64 (value, 0);
+  value = gst_mpd_client2_parse_default_presentation_delay (mpdclient, "10");
+  assert_equals_int64 (value, 0);
+  value =
+      gst_mpd_client2_parse_default_presentation_delay (mpdclient,
+      "not a number");
+  assert_equals_int64 (value, 0);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (dash_mpdparser_duration)
+{
+  guint64 v;
+
+  fail_unless (_mpd_helper_parse_duration ("", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration (" ", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("0", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("D-1", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("T", &v) == FALSE);
+
+  fail_unless (_mpd_helper_parse_duration ("P", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("PT", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("PX", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PPT", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PTT", &v) == FALSE);
+
+  fail_unless (_mpd_helper_parse_duration ("P1D", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("P1D1D", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P1D1M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P1M1D", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("P1M1D1M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P1M1D1D", &v) == FALSE);
+
+  fail_unless (_mpd_helper_parse_duration ("P0M0D", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("P-1M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P15M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P-1D", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P35D", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P-1Y", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT-1H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT25H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT-1M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT65M", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT-1S", &v) == FALSE);
+  /* seconds are allowed to be larger than 60 */
+  fail_unless (_mpd_helper_parse_duration ("PT65S", &v) == TRUE);
+
+  fail_unless (_mpd_helper_parse_duration ("PT1.1H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT1-1H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT1-H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT-H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PTH", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT0", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("PT1.1S", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("PT1.1.1S", &v) == FALSE);
+
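+  /* The ~584-year boundary below presumably reflects the largest duration
+   * that still fits in a 64-bit nanosecond value (2^64 ns is roughly 584
+   * years), so P585Y is expected to be rejected. */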
+  fail_unless (_mpd_helper_parse_duration ("P585Y", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P584Y", &v) == TRUE);
+
+  fail_unless (_mpd_helper_parse_duration (" P10DT8H", &v) == TRUE);
+  fail_unless (_mpd_helper_parse_duration ("P10D T8H", &v) == FALSE);
+  fail_unless (_mpd_helper_parse_duration ("P10DT8H ", &v) == TRUE);
+}
+
+GST_END_TEST;
+
+/*
+ * Test that the maximum_segment_duration correctly implements the
+ * rules in the DASH specification
+ */
+GST_START_TEST (dash_mpdparser_maximum_segment_duration)
+{
+  const gchar *xml_template =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     availabilityStartTime=\"2015-03-24T0:0:0\""
+      "     %s "
+      "     mediaPresentationDuration=\"P100Y\">"
+      "  <Period id=\"Period0\" start=\"PT0S\">"
+      "    <AdaptationSet mimeType=\"video/mp4\" >"
+      "      <SegmentTemplate timescale=\"90000\" initialization=\"$RepresentationID$/Header.m4s\" media=\"$RepresentationID$/$Number$.m4s\" duration=\"360000\" />"
+      "      <Representation id=\"video1\" width=\"576\" height=\"324\" frameRate=\"25\" sar=\"1:1\" bandwidth=\"900000\" codecs=\"avc1.4D401E\"/>"
+      "    </AdaptationSet>"
+      "      <AdaptationSet mimeType=\"audio/mp4\" >"
+      "        <SegmentTemplate timescale=\"90000\" initialization=\"$RepresentationID$/Header.m4s\" media=\"$RepresentationID$/$Number$.m4s\" duration=\"340000\" />"
+      "        <Representation id=\"audio1\" audioSamplingRate=\"22050\" bandwidth=\"29600\" codecs=\"mp4a.40.2\">"
+      "        <AudioChannelConfiguration schemeIdUri=\"urn:mpeg:dash:23003:3:audio_channel_configuration:2011\" value=\"2\"/>"
+      "      </Representation>" "    </AdaptationSet>" "  </Period></MPD>";
+  gboolean ret;
+  GstMPDClient2 *mpdclient;
+  gchar *xml;
+  GstClockTime dur;
+  GList *adapt_sets, *iter;
+
+  xml = g_strdup_printf (xml_template, "maxSegmentDuration=\"PT4.5S\"");
+  mpdclient = gst_mpd_client2_new ();
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  g_free (xml);
+  assert_equals_int (ret, TRUE);
+
+  assert_equals_uint64 (mpdclient->mpd_root_node->maxSegmentDuration,
+      duration_to_ms (0, 0, 0, 0, 0, 4, 500));
+  dur = gst_mpd_client2_get_maximum_segment_duration (mpdclient);
+  assert_equals_uint64 (dur, duration_to_clocktime (0, 0, 0, 0, 0, 4, 500));
+  gst_mpd_client2_free (mpdclient);
+
+  /* now parse without the maxSegmentDuration attribute, to check that
+     gst_mpd_client2_get_maximum_segment_duration uses the maximum
+     duration of any segment
+   */
+  xml = g_strdup_printf (xml_template, "");
+  mpdclient = gst_mpd_client2_new ();
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  g_free (xml);
+  assert_equals_int (ret, TRUE);
+  ret =
+      gst_mpd_client2_setup_media_presentation (mpdclient, GST_CLOCK_TIME_NONE,
+      -1, NULL);
+  assert_equals_int (ret, TRUE);
+  adapt_sets = gst_mpd_client2_get_adaptation_sets (mpdclient);
+  for (iter = adapt_sets; iter; iter = g_list_next (iter)) {
+    GstMPDAdaptationSetNode *adapt_set_node = iter->data;
+
+    ret = gst_mpd_client2_setup_streaming (mpdclient, adapt_set_node);
+    assert_equals_int (ret, TRUE);
+  }
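+  /* Without a maxSegmentDuration attribute, the expected maximum comes from
+   * the SegmentTemplate durations above: video is 360000 / 90000 = 4 s and
+   * audio is 340000 / 90000 = ~3.78 s, so the maximum is 4 s. */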
+  dur = gst_mpd_client2_get_maximum_segment_duration (mpdclient);
+  assert_equals_uint64 (dur, duration_to_clocktime (0, 0, 0, 0, 0, 4, 0));
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test parsing xsd:datetime with timezoneoffset.
+ *
+ */
+GST_START_TEST (dash_mpdparser_datetime_with_tz_offset)
+{
+  GstDateTime *availabilityStartTime;
+  GstDateTime *availabilityEndTime;
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     schemaLocation=\"TestSchemaLocation\""
+      "     xmlns:xsi=\"TestNamespaceXSI\""
+      "     xmlns:ext=\"TestNamespaceEXT\""
+      "     id=\"testId\""
+      "     type=\"static\""
+      "     availabilityStartTime=\"2015-03-24T1:10:50+08:00\""
+      "     availabilityEndTime=\"2015-03-24T1:10:50.123456-04:30\""
+      "     mediaPresentationDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     minimumUpdatePeriod=\"P0Y1M2DT12H10M20.5S\""
+      "     minBufferTime=\"P0Y1M2DT12H10M20.5S\""
+      "     timeShiftBufferDepth=\"P0Y1M2DT12H10M20.5S\""
+      "     suggestedPresentationDelay=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSegmentDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSubsegmentDuration=\"P0Y1M2DT12H10M20.5S\"></MPD>";
+
+  gboolean ret;
+  GstMPDClient2 *mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
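+  /* gst_date_time_get_time_zone_offset () reports the UTC offset in
+   * (fractional) hours, so "+08:00" is expected as 8.0 and "-04:30" as -4.5. */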
+  availabilityStartTime = mpdclient->mpd_root_node->availabilityStartTime;
+  assert_equals_int (gst_date_time_get_year (availabilityStartTime), 2015);
+  assert_equals_int (gst_date_time_get_month (availabilityStartTime), 3);
+  assert_equals_int (gst_date_time_get_day (availabilityStartTime), 24);
+  assert_equals_int (gst_date_time_get_hour (availabilityStartTime), 1);
+  assert_equals_int (gst_date_time_get_minute (availabilityStartTime), 10);
+  assert_equals_int (gst_date_time_get_second (availabilityStartTime), 50);
+  assert_equals_int (gst_date_time_get_microsecond (availabilityStartTime), 0);
+  assert_equals_float (gst_date_time_get_time_zone_offset
+      (availabilityStartTime), 8.0);
+
+  availabilityEndTime = mpdclient->mpd_root_node->availabilityEndTime;
+  assert_equals_int (gst_date_time_get_year (availabilityEndTime), 2015);
+  assert_equals_int (gst_date_time_get_month (availabilityEndTime), 3);
+  assert_equals_int (gst_date_time_get_day (availabilityEndTime), 24);
+  assert_equals_int (gst_date_time_get_hour (availabilityEndTime), 1);
+  assert_equals_int (gst_date_time_get_minute (availabilityEndTime), 10);
+  assert_equals_int (gst_date_time_get_second (availabilityEndTime), 50);
+  assert_equals_int (gst_date_time_get_microsecond (availabilityEndTime),
+      123456);
+  assert_equals_float (gst_date_time_get_time_zone_offset (availabilityEndTime),
+      -4.5);
+
+  gst_mpd_client2_free (mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test generating XML content.
+ *
+ */
+GST_START_TEST (dash_mpdparser_check_mpd_xml_generator)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     schemaLocation=\"TestSchemaLocation\""
+      "     xmlns:xsi=\"TestNamespaceXSI\""
+      "     xmlns:ext=\"TestNamespaceEXT\""
+      "     id=\"testId\""
+      "     type=\"static\""
+      "     availabilityStartTime=\"2015-03-24T1:10:50+08:00\""
+      "     availabilityEndTime=\"2015-03-24T1:10:50.123456-04:30\""
+      "     mediaPresentationDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     minimumUpdatePeriod=\"P0Y1M2DT12H10M20.5S\""
+      "     minBufferTime=\"P0Y1M2DT12H10M20.5S\""
+      "     timeShiftBufferDepth=\"P0Y1M2DT12H10M20.5S\""
+      "     suggestedPresentationDelay=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSegmentDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSubsegmentDuration=\"P0Y1M2DT12H10M20.5S\">"
+      "     <BaseURL serviceLocation=\"TestServiceLocation\""
+      "     byteRange=\"TestByteRange\">TestBaseURL</BaseURL>"
+      "     <Location>TestLocation</Location>"
+      "     <ProgramInformation lang=\"en\""
+      "     moreInformationURL=\"TestMoreInformationUrl\">"
+      "     <Title>TestTitle</Title>"
+      "     <Source>TestSource</Source>"
+      "     <Copyright>TestCopyright</Copyright>"
+      "     </ProgramInformation>"
+      "     <Metrics metrics=\"TestMetric\"><Range starttime=\"P0Y1M2DT12H10M20.5S\""
+      "           duration=\"P0Y1M2DT12H10M20.1234567S\">"
+      "    </Range></Metrics>"
+      "  <Period>"
+      "    <AdaptationSet>"
+      "      <Representation id=\"1\" bandwidth=\"250000\">"
+      "        <SegmentTemplate duration=\"1\">"
+      "        </SegmentTemplate>"
+      "      </Representation></AdaptationSet></Period>" "     </MPD>";
+
+  gboolean ret;
+  gchar *new_xml;
+  gint new_xml_size;
+  GstMPDClient2 *first_mpdclient = NULL;
+  GstMPDClient2 *second_mpdclient = NULL;
+  GstMPDBaseURLNode *first_baseURL, *second_baseURL;
+  GstMPDLocationNode *first_location, *second_location;
+  GstMPDProgramInformationNode *first_prog_info, *second_prog_info;
+  GstMPDMetricsNode *first_metrics, *second_metrics;
+  GstMPDMetricsRangeNode *first_metrics_range, *second_metrics_range;
+
+  first_mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (first_mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  ret =
+      gst_mpd_client2_get_xml_content (first_mpdclient, &new_xml,
+      &new_xml_size);
+  assert_equals_int (ret, TRUE);
+
+  second_mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (second_mpdclient, new_xml, new_xml_size);
+  assert_equals_int (ret, TRUE);
+  g_free (new_xml);
+
+  /* assert that parameters are equal */
+  assert_equals_string (first_mpdclient->mpd_root_node->default_namespace,
+      second_mpdclient->mpd_root_node->default_namespace);
+  assert_equals_string (first_mpdclient->mpd_root_node->namespace_xsi,
+      second_mpdclient->mpd_root_node->namespace_xsi);
+  assert_equals_string (first_mpdclient->mpd_root_node->namespace_ext,
+      second_mpdclient->mpd_root_node->namespace_ext);
+  assert_equals_string (first_mpdclient->mpd_root_node->schemaLocation,
+      second_mpdclient->mpd_root_node->schemaLocation);
+  assert_equals_string (first_mpdclient->mpd_root_node->id,
+      second_mpdclient->mpd_root_node->id);
+  assert_equals_string (first_mpdclient->mpd_root_node->profiles,
+      second_mpdclient->mpd_root_node->profiles);
+  assert_equals_uint64 (first_mpdclient->
+      mpd_root_node->mediaPresentationDuration,
+      second_mpdclient->mpd_root_node->mediaPresentationDuration);
+  assert_equals_uint64 (first_mpdclient->mpd_root_node->minimumUpdatePeriod,
+      second_mpdclient->mpd_root_node->minimumUpdatePeriod);
+  assert_equals_uint64 (first_mpdclient->mpd_root_node->minBufferTime,
+      second_mpdclient->mpd_root_node->minBufferTime);
+  assert_equals_uint64 (first_mpdclient->mpd_root_node->timeShiftBufferDepth,
+      second_mpdclient->mpd_root_node->timeShiftBufferDepth);
+  assert_equals_uint64 (first_mpdclient->
+      mpd_root_node->suggestedPresentationDelay,
+      second_mpdclient->mpd_root_node->suggestedPresentationDelay);
+  assert_equals_uint64 (first_mpdclient->mpd_root_node->maxSegmentDuration,
+      second_mpdclient->mpd_root_node->maxSegmentDuration);
+  assert_equals_uint64 (first_mpdclient->mpd_root_node->maxSubsegmentDuration,
+      second_mpdclient->mpd_root_node->maxSubsegmentDuration);
+
+  /* baseURLs */
+  first_baseURL =
+      (GstMPDBaseURLNode *) first_mpdclient->mpd_root_node->BaseURLs->data;
+  second_baseURL =
+      (GstMPDBaseURLNode *) second_mpdclient->mpd_root_node->BaseURLs->data;
+  assert_equals_string (first_baseURL->baseURL, second_baseURL->baseURL);
+  assert_equals_string (first_baseURL->serviceLocation,
+      second_baseURL->serviceLocation);
+  assert_equals_string (first_baseURL->byteRange, second_baseURL->byteRange);
+
+  /* locations */
+  first_location =
+      (GstMPDLocationNode *) first_mpdclient->mpd_root_node->Locations->data;
+  second_location =
+      (GstMPDLocationNode *) second_mpdclient->mpd_root_node->Locations->data;
+  assert_equals_string (first_location->location, second_location->location);
+
+  /* ProgramInformation */
+  first_prog_info =
+      (GstMPDProgramInformationNode *) first_mpdclient->mpd_root_node->
+      ProgramInfos->data;
+  second_prog_info =
+      (GstMPDProgramInformationNode *) second_mpdclient->mpd_root_node->
+      ProgramInfos->data;
+  assert_equals_string (first_prog_info->lang, second_prog_info->lang);
+  assert_equals_string (first_prog_info->moreInformationURL,
+      second_prog_info->moreInformationURL);
+  assert_equals_string (first_prog_info->Title, second_prog_info->Title);
+  assert_equals_string (first_prog_info->Source, second_prog_info->Source);
+  assert_equals_string (first_prog_info->Copyright,
+      second_prog_info->Copyright);
+
+  /* Metrics */
+  first_metrics =
+      (GstMPDMetricsNode *) first_mpdclient->mpd_root_node->Metrics->data;
+  second_metrics =
+      (GstMPDMetricsNode *) second_mpdclient->mpd_root_node->Metrics->data;
+  assert_equals_string (first_metrics->metrics, second_metrics->metrics);
+
+  /* Metrics Range */
+  first_metrics_range =
+      (GstMPDMetricsRangeNode *) first_metrics->MetricsRanges->data;
+  second_metrics_range =
+      (GstMPDMetricsRangeNode *) second_metrics->MetricsRanges->data;
+  assert_equals_uint64 (first_metrics_range->starttime,
+      second_metrics_range->starttime);
+  assert_equals_uint64 (first_metrics_range->duration,
+      second_metrics_range->duration);
+
+  gst_mpd_client2_free (first_mpdclient);
+  gst_mpd_client2_free (second_mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * Test adding MPD content with the mpd_client set methods
+ *
+ */
+GST_START_TEST (dash_mpdparser_check_mpd_client_set_methods)
+{
+  const gchar *xml =
+      "<?xml version=\"1.0\"?>"
+      "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\""
+      "     profiles=\"urn:mpeg:dash:profile:isoff-main:2011\""
+      "     schemaLocation=\"TestSchemaLocation\""
+      "     xmlns:xsi=\"TestNamespaceXSI\""
+      "     xmlns:ext=\"TestNamespaceEXT\""
+      "     id=\"testId\""
+      "     type=\"static\""
+      "     availabilityStartTime=\"2015-03-24T1:10:50+08:00\""
+      "     availabilityEndTime=\"2015-03-24T1:10:50.123456-04:30\""
+      "     mediaPresentationDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     minimumUpdatePeriod=\"P0Y1M2DT12H10M20.5S\""
+      "     minBufferTime=\"P0Y1M2DT12H10M20.5S\""
+      "     timeShiftBufferDepth=\"P0Y1M2DT12H10M20.5S\""
+      "     suggestedPresentationDelay=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSegmentDuration=\"P0Y1M2DT12H10M20.5S\""
+      "     maxSubsegmentDuration=\"P0Y1M2DT12H10M20.5S\">"
+      "     <BaseURL serviceLocation=\"TestServiceLocation\""
+      "     byteRange=\"TestByteRange\">TestBaseURL</BaseURL>"
+      "     <Location>TestLocation</Location>"
+      "     <ProgramInformation lang=\"en\""
+      "     moreInformationURL=\"TestMoreInformationUrl\">"
+      "     <Title>TestTitle</Title>"
+      "     <Source>TestSource</Source>"
+      "     <Copyright>TestCopyright</Copyright>"
+      "     </ProgramInformation>"
+      "     <Metrics metrics=\"TestMetric\"><Range starttime=\"P0Y1M2DT12H10M20.5S\""
+      "           duration=\"P0Y1M2DT12H10M20.1234567S\">"
+      "    </Range></Metrics>"
+      "  <Period id=\"TestId\" start=\"PT1M\" duration=\"PT40S\""
+      "          bitstreamSwitching=\"true\">"
+      "    <AdaptationSet id=\"9\" contentType=\"video\" mimeType=\"video\">"
+      "      <Representation id=\"audio_1\" "
+      "                      bandwidth=\"100\""
+      "                      qualityRanking=\"200\""
+      "                      width=\"640\""
+      "                      height=\"480\""
+      "                      codecs=\"avc1\""
+      "                      audioSamplingRate=\"44100\""
+      "                      mimeType=\"audio/mp4\">"
+      "        <SegmentList duration=\"15\" startNumber=\"11\">"
+      "          <SegmentURL media=\"segment001.ts\"></SegmentURL>"
+      "          <SegmentURL media=\"segment002.ts\"></SegmentURL>"
+      "        </SegmentList>"
+      "      </Representation></AdaptationSet></Period>" "     </MPD>";
+  gboolean ret;
+  gchar *period_id;
+  guint adaptation_set_id;
+  gchar *representation_id;
+  GstMPDClient2 *first_mpdclient = NULL;
+  GstMPDClient2 *second_mpdclient = NULL;
+  GstMPDBaseURLNode *first_baseURL, *second_baseURL;
+  GstMPDPeriodNode *first_period, *second_period;
+  GstMPDAdaptationSetNode *first_adap_set, *second_adap_set;
+  GstMPDRepresentationNode *first_rep, *second_rep;
+  GstMPDSegmentListNode *first_seg_list, *second_seg_list;
+  GstMPDSegmentURLNode *first_seg_url, *second_seg_url;
+
+  first_mpdclient = gst_mpd_client2_new ();
+
+  ret = gst_mpd_client2_parse (first_mpdclient, xml, (gint) strlen (xml));
+  assert_equals_int (ret, TRUE);
+
+  second_mpdclient = gst_mpd_client2_new ();
+  gst_mpd_client2_set_root_node (second_mpdclient,
+      "default-namespace", "urn:mpeg:dash:schema:mpd:2011",
+      "profiles", "urn:mpeg:dash:profile:isoff-main:2011",
+      "schema-location", "TestSchemaLocation",
+      "namespace-xsi", "TestNamespaceXSI",
+      "namespace-ext", "TestNamespaceEXT", "id", "testId", NULL);
+  gst_mpd_client2_add_baseurl_node (second_mpdclient,
+      "url", "TestBaseURL",
+      "service-location", "TestServiceLocation",
+      "byte-range", "TestByteRange", NULL);
+  period_id = gst_mpd_client2_set_period_node (second_mpdclient, (gchar *) "TestId", "start", (guint64) 60000,  // ms
+      "duration", (guint64) 40000, "bitstream-switching", 1, NULL);
+  adaptation_set_id =
+      gst_mpd_client2_set_adaptation_set_node (second_mpdclient, period_id, 9,
+      "content-type", "video", "mime-type", "video", NULL);
+
+  representation_id =
+      gst_mpd_client2_set_representation_node (second_mpdclient, period_id,
+      adaptation_set_id, (gchar *) "audio_1", "bandwidth", 100,
+      "quality-ranking", 200, "mime-type", "audio/mp4", "width", 640, "height",
+      480, "codecs", "avc1", "audio-sampling-rate", 44100, NULL);
+
+  gst_mpd_client2_set_segment_list (second_mpdclient, period_id,
+      adaptation_set_id, representation_id, "duration", 15, "start-number", 11,
+      NULL);
+  gst_mpd_client2_add_segment_url (second_mpdclient, period_id,
+      adaptation_set_id, representation_id, "media", "segment001.ts", NULL);
+  gst_mpd_client2_add_segment_url (second_mpdclient, period_id,
+      adaptation_set_id, representation_id, "media", "segment002.ts", NULL);
+
+  /* assert that parameters are equal */
+  assert_equals_string (first_mpdclient->mpd_root_node->default_namespace,
+      second_mpdclient->mpd_root_node->default_namespace);
+  assert_equals_string (first_mpdclient->mpd_root_node->namespace_xsi,
+      second_mpdclient->mpd_root_node->namespace_xsi);
+  assert_equals_string (first_mpdclient->mpd_root_node->namespace_ext,
+      second_mpdclient->mpd_root_node->namespace_ext);
+  assert_equals_string (first_mpdclient->mpd_root_node->schemaLocation,
+      second_mpdclient->mpd_root_node->schemaLocation);
+  assert_equals_string (first_mpdclient->mpd_root_node->id,
+      second_mpdclient->mpd_root_node->id);
+  assert_equals_string (first_mpdclient->mpd_root_node->profiles,
+      second_mpdclient->mpd_root_node->profiles);
+
+
+  /* baseURLs */
+  first_baseURL =
+      (GstMPDBaseURLNode *) first_mpdclient->mpd_root_node->BaseURLs->data;
+  second_baseURL =
+      (GstMPDBaseURLNode *) second_mpdclient->mpd_root_node->BaseURLs->data;
+  assert_equals_string (first_baseURL->baseURL, second_baseURL->baseURL);
+  assert_equals_string (first_baseURL->serviceLocation,
+      second_baseURL->serviceLocation);
+  assert_equals_string (first_baseURL->byteRange, second_baseURL->byteRange);
+
+  /* Period */
+  first_period =
+      (GstMPDPeriodNode *) first_mpdclient->mpd_root_node->Periods->data;
+  second_period =
+      (GstMPDPeriodNode *) second_mpdclient->mpd_root_node->Periods->data;
+
+  assert_equals_string (first_period->id, second_period->id);
+  assert_equals_int64 (first_period->start, second_period->start);
+  assert_equals_int64 (first_period->duration, second_period->duration);
+  assert_equals_int (first_period->bitstreamSwitching,
+      second_period->bitstreamSwitching);
+
+  /* Adaptation set */
+  first_adap_set =
+      (GstMPDAdaptationSetNode *) first_period->AdaptationSets->data;
+  second_adap_set =
+      (GstMPDAdaptationSetNode *) second_period->AdaptationSets->data;
+
+  assert_equals_int (first_adap_set->id, second_adap_set->id);
+  assert_equals_string (first_adap_set->contentType,
+      second_adap_set->contentType);
+  assert_equals_string (GST_MPD_REPRESENTATION_BASE_NODE
+      (first_adap_set)->mimeType,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_adap_set)->mimeType);
+
+  /* Representation */
+  first_rep =
+      (GstMPDRepresentationNode *) first_adap_set->Representations->data;
+  second_rep =
+      (GstMPDRepresentationNode *) second_adap_set->Representations->data;
+  assert_equals_string (first_rep->id, second_rep->id);
+  assert_equals_int (first_rep->bandwidth, second_rep->bandwidth);
+  assert_equals_int (first_rep->qualityRanking, second_rep->qualityRanking);
+  assert_equals_string (GST_MPD_REPRESENTATION_BASE_NODE (first_rep)->mimeType,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_rep)->mimeType);
+
+  assert_equals_int (GST_MPD_REPRESENTATION_BASE_NODE (first_rep)->width,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_rep)->width);
+
+  assert_equals_int (GST_MPD_REPRESENTATION_BASE_NODE (first_rep)->height,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_rep)->height);
+
+  assert_equals_string (GST_MPD_REPRESENTATION_BASE_NODE (first_rep)->codecs,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_rep)->codecs);
+
+  assert_equals_string (GST_MPD_REPRESENTATION_BASE_NODE
+      (first_rep)->audioSamplingRate,
+      GST_MPD_REPRESENTATION_BASE_NODE (second_rep)->audioSamplingRate);
+
+  /* SegmentList */
+  first_seg_list = (GstMPDSegmentListNode *) first_rep->SegmentList;
+  second_seg_list = (GstMPDSegmentListNode *) second_rep->SegmentList;
+  assert_equals_int (GST_MPD_MULT_SEGMENT_BASE_NODE (first_seg_list)->duration,
+      GST_MPD_MULT_SEGMENT_BASE_NODE (second_seg_list)->duration);
+  assert_equals_int (GST_MPD_MULT_SEGMENT_BASE_NODE
+      (first_seg_list)->startNumber,
+      GST_MPD_MULT_SEGMENT_BASE_NODE (second_seg_list)->startNumber);
+
+  first_seg_url = (GstMPDSegmentURLNode *) first_seg_list->SegmentURL->data;
+  second_seg_url = (GstMPDSegmentURLNode *) second_seg_list->SegmentURL->data;
+
+  assert_equals_string (first_seg_url->media, second_seg_url->media);
+
+
+  gst_mpd_client2_free (first_mpdclient);
+  gst_mpd_client2_free (second_mpdclient);
+}
+
+GST_END_TEST;
+
+/*
+ * create a test suite containing all DASH test cases
+ */
+static Suite *
+dash_suite (void)
+{
+  Suite *s = suite_create ("dash");
+  TCase *tc_simpleMPD = tcase_create ("simpleMPD");
+  TCase *tc_complexMPD = tcase_create ("complexMPD");
+  TCase *tc_negativeTests = tcase_create ("negativeTests");
+  TCase *tc_stringTests = tcase_create ("stringTests");
+  TCase *tc_duration = tcase_create ("duration");
+
+  GST_DEBUG_CATEGORY_INIT (gst_dash_demux2_debug, "gst_dash_demux2_debug", 0,
+      "mpeg dashdemux2 tests");
+
+  /* test parsing the simplest possible mpd */
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_validsimplempd);
+
+  /* test the MPD XML generator */
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_check_mpd_xml_generator);
+
+  /* test mpd client set methods */
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_check_mpd_client_set_methods);
+
+  /* tests parsing attributes from each element type */
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_mpd);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_datetime_with_tz_offset);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_programInformation);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_baseURL);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_location);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_metrics);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_metrics_range);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_metrics_reporting);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_baseURL);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_segmentBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentBase_initialization);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentBase_representationIndex);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_segmentList);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentList_multipleSegmentBaseType);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentBaseType);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentTimeline);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentList_multipleSegmentBaseType_segmentTimeline_s);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentList_multipleSegmentBaseType_bitstreamSwitching);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_segmentList_segmentURL);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_segmentTemplate);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplateWithPresentationTimeOffset);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentBaseType);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentTimeline);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_segmentTimeline_s);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_segmentTemplate_multipleSegmentBaseType_bitstreamSwitching);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_adaptationSet);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representationBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representationBase_framePacking);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_adapt_repr_segmentTemplate_inherit);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representationBase_audioChannelConfiguration);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representationBase_contentProtection);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_contentProtection_no_value);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_contentProtection_no_value_no_encoding);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_accessibility);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_adaptationSet_role);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_adaptationSet_rating);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_adaptationSet_viewpoint);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_contentComponent);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_contentComponent_accessibility);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_contentComponent_role);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_contentComponent_rating);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_contentComponent_viewpoint);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_adaptationSet_baseURL);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentBase_initialization);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentBase_representationIndex);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentList);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentTemplate);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_segmentTemplate_inherit);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_representationBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_baseURL);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_subRepresentation);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_subRepresentation_representationBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_segmentBase);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_segmentList);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_segmentTemplate);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_segmentTemplate_inherit);
+  tcase_add_test (tc_simpleMPD,
+      dash_mpdparser_period_adaptationSet_representation_segmentBase_inherit);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_period_subset);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_utctiming);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_utctiming_invalid_value);
+
+  /* tests checking other possible values for attributes */
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_type_dynamic);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_template_parsing);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_isoff_ondemand_profile);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_GstDateTime);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_bitstreamSwitching_inheritance);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_various_duration_formats);
+  tcase_add_test (tc_simpleMPD, dash_mpdparser_default_presentation_delay);
+
+  /* tests checking the MPD management
+   * (eg. setting active streams, obtaining attributes values)
+   */
+  tcase_add_test (tc_complexMPD, dash_mpdparser_setup_media_presentation);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_setup_streaming);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_period_selection);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_period_at_time);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_adaptationSet_handling);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_representation_selection);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_multipleSegmentURL);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_activeStream_selection);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_activeStream_parameters);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_audio_languages);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL1);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL2);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL3);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL4);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL5);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL6);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL7);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_baseURL8);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_mediaPresentationDuration);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_get_streamPresentationOffset);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_segments);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_headers);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_fragments);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_inherited_segmentBase);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_inherited_segmentURL);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_segment_list);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_segment_template);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_segment_timeline);
+  tcase_add_test (tc_complexMPD, dash_mpdparser_multiple_inherited_segmentURL);
+
+  /* tests checking the parsing of missing/incomplete attributes of xml */
+  tcase_add_test (tc_negativeTests, dash_mpdparser_missing_xml);
+  tcase_add_test (tc_negativeTests, dash_mpdparser_missing_mpd);
+  tcase_add_test (tc_negativeTests, dash_mpdparser_no_end_tag);
+  tcase_add_test (tc_negativeTests, dash_mpdparser_no_profiles);
+  tcase_add_test (tc_negativeTests, dash_mpdparser_no_default_namespace);
+  tcase_add_test (tc_negativeTests,
+      dash_mpdparser_wrong_period_duration_inferred_from_next_period);
+  tcase_add_test (tc_negativeTests,
+      dash_mpdparser_wrong_period_duration_inferred_from_next_mediaPresentationDuration);
+  tcase_add_test (tc_negativeTests, dash_mpdparser_negative_period_duration);
+  tcase_add_test (tc_negativeTests,
+      dash_mpdparser_read_unsigned_from_negative_values);
+  tcase_add_test (tc_negativeTests,
+      dash_mpdparser_negative_mediaPresentationDuration);
+  tcase_add_test (tc_negativeTests,
+      dash_mpdparser_unmatched_segmentTimeline_segmentURL);
+
+  tcase_add_test (tc_stringTests, dash_mpdparser_whitespace_strings);
+  tcase_add_test (tc_stringTests, dash_mpdparser_rfc1738_strings);
+
+  tcase_add_test (tc_duration, dash_mpdparser_duration);
+  tcase_add_test (tc_duration, dash_mpdparser_maximum_segment_duration);
+
+  suite_add_tcase (s, tc_simpleMPD);
+  suite_add_tcase (s, tc_complexMPD);
+  suite_add_tcase (s, tc_negativeTests);
+  suite_add_tcase (s, tc_stringTests);
+  suite_add_tcase (s, tc_duration);
+
+  return s;
+}
+
+GST_CHECK_MAIN (dash);
diff --git a/subprojects/gst-plugins-good/tests/check/elements/hlsdemux_m3u8.c b/subprojects/gst-plugins-good/tests/check/elements/hlsdemux_m3u8.c
new file mode 100644 (file)
index 0000000..a6bf53d
--- /dev/null
@@ -0,0 +1,883 @@
+/* GStreamer
+ *
+ * unit test for hlsdemux
+ *
+ * Copyright (C) <2012> Fluendo S.A <support@fluendo.com>
+ *  Authors: Andoni Morales Alastruey <amorales@fluendo.com>
+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <gst/check/gstcheck.h>
+
+#undef GST_CAT_DEFAULT
+#include "m3u8.h"
+#include "m3u8.c"
+
+GST_DEBUG_CATEGORY (hls_debug);
+
+static const gchar *INVALID_PLAYLIST = "#EXTM3 UINVALID";
+
+static const gchar *ON_DEMAND_PLAYLIST = "#EXTM3U \n\
+#EXT-X-TARGETDURATION:10\n\
+#EXTINF:10,Test\n\
+http://media.example.com/001.ts\n\
+#EXTINF:10,Test\n\
+http://media.example.com/002.ts\n\
+#EXTINF:10,Test\n\
+http://media.example.com/003.ts\n\
+#EXTINF:10,Test\n\
+http://media.example.com/004.ts\n\
+#EXT-X-ENDLIST";
+
+static const gchar *DOUBLES_PLAYLIST = "#EXTM3U \n\
+#EXT-X-TARGETDURATION:10\n\
+#EXTINF:10.321,Test\n\
+http://media.example.com/001.ts\n\
+#EXTINF:9.6789,Test\n\
+http://media.example.com/002.ts\n\
+#EXTINF:10.2344,Test\n\
+http://media.example.com/003.ts\n\
+#EXTINF:9.92,Test\n\
+http://media.example.com/004.ts\n\
+#EXT-X-ENDLIST";
+
+static const gchar *LIVE_PLAYLIST = "#EXTM3U\n\
+#EXT-X-TARGETDURATION:8\n\
+#EXT-X-MEDIA-SEQUENCE:2680\n\
+\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence2680.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence2681.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence2682.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence2683.ts";
+
+static const gchar *LIVE_ROTATED_PLAYLIST = "#EXTM3U\n\
+#EXT-X-TARGETDURATION:8\n\
+#EXT-X-MEDIA-SEQUENCE:3001\n\
+\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence3001.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence3002.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence3003.ts\n\
+#EXTINF:8,\n\
+https://priv.example.com/fileSequence3004.ts";
+
+static const gchar *VARIANT_PLAYLIST = "#EXTM3U \n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=128000\n\
+http://example.com/low.m3u8\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=256000\n\
+http://example.com/mid.m3u8\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=768000\n\
+http://example.com/hi.m3u8\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=65000,CODECS=\"mp4a.40.5\"\n\
+http://example.com/audio-only.m3u8";
+
+static const gchar *VARIANT_PLAYLIST_WITH_URI_MISSING = "#EXTM3U \n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=128000\n\
+http://example.com/low.m3u8\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=256000\n\
+\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=768000\n\
+http://example.com/hi.m3u8\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=65000,CODECS=\"mp4a.40.5\"\n\
+http://example.com/audio-only.m3u8";
+
+static const gchar *EMPTY_LINES_VARIANT_PLAYLIST = "#EXTM3U \n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=128000\n\n\
+http://example.com/low.m3u8\n\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=256000\n\n\
+http://example.com/mid.m3u8\n\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=768000\n\n\
+http://example.com/hi.m3u8\n\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=65000,CODECS=\"mp4a.40.5\"\n\n\
+http://example.com/audio-only.m3u8";
+
+static const gchar *WINDOWS_EMPTY_LINES_VARIANT_PLAYLIST = "#EXTM3U \r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=128000\r\n\r\n\
+http://example.com/low.m3u8\r\n\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=256000\r\n\r\n\
+http://example.com/mid.m3u8\r\n\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=768000\r\n\r\n\
+http://example.com/hi.m3u8\r\n\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=65000,CODECS=\"mp4a.40.5\"\r\n\r\n\
+http://example.com/audio-only.m3u8";
+
+static const gchar *EMPTY_LINES_PLAYLIST = "#EXTM3U \n\n\
+#EXT-X-TARGETDURATION:10\n\
+#EXTINF:10,Test\n\
+http://media.example.com/001.ts\n\n\
+#EXTINF:10,Test\n\n\
+http://media.example.com/002.ts\n\n\
+#EXTINF:10,Test\n\n\
+http://media.example.com/003.ts\n\n\
+#EXTINF:10,Test\n\n\
+http://media.example.com/004.ts\n\n\
+#EXT-X-ENDLIST";
+
+static const gchar *WINDOWS_EMPTY_LINES_PLAYLIST = "#EXTM3U \r\n\
+#EXT-X-TARGETDURATION:10\r\n\r\n\
+#EXTINF:10,Test\r\n\r\n\
+http://media.example.com/001.ts\r\n\r\n\
+#EXTINF:10,Test\r\n\r\n\
+http://media.example.com/002.ts\r\n\r\n\
+#EXTINF:10,Test\r\n\r\n\
+http://media.example.com/003.ts\r\n\r\n\
+#EXTINF:10,Test\r\n\r\n\
+http://media.example.com/004.ts\r\n\r\n\
+#EXT-X-ENDLIST";
+
+static const gchar *BYTE_RANGES_PLAYLIST = "#EXTM3U \n\
+#EXT-X-TARGETDURATION:40\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000@100\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000@1000\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000@2000\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000@3000\n\
+http://media.example.com/all.ts\n\
+#EXT-X-ENDLIST";
+
+static const gchar *BYTE_RANGES_ACC_OFFSET_PLAYLIST = "#EXTM3U \n\
+#EXT-X-TARGETDURATION:40\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000\n\
+http://media.example.com/all.ts\n\
+#EXTINF:10,Test\n\
+#EXT-X-BYTERANGE:1000\n\
+http://media.example.com/all.ts\n\
+#EXT-X-ENDLIST";
+
+static const gchar *AES_128_ENCRYPTED_PLAYLIST = "#EXTM3U \n\
+#EXT-X-TARGETDURATION:10\n\
+#EXTINF:10,Test\n\
+http://media.example.com/mid/video-only-001.ts\n\
+#EXT-X-KEY:METHOD=NONE\n\
+#EXTINF:10,Test\n\
+http://media.example.com/mid/video-only-002.ts\n\
+#EXT-X-KEY:METHOD=AES-128,URI=\"https://priv.example.com/key.bin\"\n\
+#EXTINF:10,Test\n\
+http://media.example.com/mid/video-only-003.ts\n\
+#EXT-X-KEY:METHOD=AES-128,URI=\"https://priv.example.com/key2.bin\",IV=0x00000000000000000000000000000001\n\
+#EXTINF:10,Test\n\
+http://media.example.com/mid/video-only-004.ts\n\
+#EXTINF:10,Test\n\
+http://media.example.com/mid/video-only-005.ts\n\
+#EXT-X-ENDLIST";
+
+static const gchar *WINDOWS_LINE_ENDINGS_PLAYLIST = "#EXTM3U \r\n\
+#EXT-X-TARGETDURATION:10\r\n\
+#EXTINF:10,Test\r\n\
+http://media.example.com/001.ts\r\n\
+#EXTINF:10,Test\r\n\
+http://media.example.com/002.ts\r\n\
+#EXTINF:10,Test\r\n\
+http://media.example.com/003.ts\r\n\
+#EXTINF:10,Test\r\n\
+http://media.example.com/004.ts\r\n\
+#EXT-X-ENDLIST";
+
+static const gchar *WINDOWS_LINE_ENDINGS_VARIANT_PLAYLIST = "#EXTM3U \r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=128000\r\n\
+http://example.com/low.m3u8\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=256000\r\n\
+http://example.com/mid.m3u8\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=768000\r\n\
+http://example.com/hi.m3u8\r\n\
+#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=65000,CODECS=\"mp4a.40.5\"\r\n\
+http://example.com/audio-only.m3u8";
+
+static const gchar *MAP_TAG_PLAYLIST = "#EXTM3U \n\
+#EXT-X-VERSION:7\n\
+#EXT-X-MAP:URI=\"init1.mp4\",BYTERANGE=\"50@50\"\n\
+#EXTINF:6.00000,\n\
+#EXT-X-BYTERANGE:100@50\n\
+main.mp4\n\
+#EXTINF:6.00000,\n\
+#EXT-X-BYTERANGE:100@150\n\
+main.mp4\n\
+#EXT-X-MAP:URI=\"init2.mp4\"\n\
+#EXTINF:6.00000,\n\
+#EXT-X-BYTERANGE:100@300\n\
+main.mp4\n\
+#EXT-X-ENDLIST";
+
+static GstHLSMediaPlaylist *
+load_m3u8 (const gchar * data)
+{
+  GstHLSMediaPlaylist *playlist;
+
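+  /* gst_hls_media_playlist_parse () presumably takes ownership of the string,
+   * hence the g_strdup () of the static test data. */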
+  playlist = gst_hls_media_playlist_parse (g_strdup (data),
+      "http://localhost/test.m3u8", NULL);
+  fail_unless (playlist != NULL);
+
+  return playlist;
+}
+
+static GstHLSMasterPlaylist *
+load_master_playlist (const gchar * data)
+{
+  GstHLSMasterPlaylist *master;
+
+  master = gst_hls_master_playlist_new_from_data (g_strdup (data),
+      "http://localhost/test.m3u8");
+  fail_unless (master != NULL);
+
+  return master;
+}
+
+GST_START_TEST (test_load_main_playlist_invalid)
+{
+  GstHLSMasterPlaylist *master;
+
+  master =
+      gst_hls_master_playlist_new_from_data (g_strdup (INVALID_PLAYLIST), NULL);
+  fail_unless (master == NULL);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_load_main_playlist_rendition)
+{
+  GstHLSMediaPlaylist *playlist;
+
+  playlist = load_m3u8 (ON_DEMAND_PLAYLIST);
+
+  assert_equals_int (playlist->segments->len, 4);
+  assert_equals_int (playlist->version, 1);
+
+  gst_hls_media_playlist_unref (playlist);
+}
+
+GST_END_TEST;
+
+static void
+do_test_load_main_playlist_variant (const gchar * playlist)
+{
+  GstHLSMasterPlaylist *master;
+  GstHLSVariantStream *stream;
+  GList *tmp;
+
+  master = load_master_playlist (playlist);
+
+  assert_equals_int (g_list_length (master->variants), 4);
+
+  /* Audio-Only */
+  tmp = g_list_first (master->variants);
+  stream = tmp->data;
+  assert_equals_int (stream->bandwidth, 65000);
+  assert_equals_int (stream->program_id, 1);
+  assert_equals_string (stream->uri, "http://example.com/audio-only.m3u8");
+  assert_equals_string (stream->codecs, "mp4a.40.5");
+
+  /* Low */
+  tmp = g_list_next (tmp);
+  stream = tmp->data;
+  assert_equals_int (stream->bandwidth, 128000);
+  assert_equals_int (stream->program_id, 1);
+  assert_equals_string (stream->uri, "http://example.com/low.m3u8");
+
+  /* Mid */
+  tmp = g_list_next (tmp);
+  stream = tmp->data;
+  assert_equals_int (stream->bandwidth, 256000);
+  assert_equals_int (stream->program_id, 1);
+  assert_equals_string (stream->uri, "http://example.com/mid.m3u8");
+
+  /* High */
+  tmp = g_list_next (tmp);
+  stream = tmp->data;
+  assert_equals_int (stream->bandwidth, 768000);
+  assert_equals_int (stream->program_id, 1);
+  assert_equals_string (stream->uri, "http://example.com/hi.m3u8");
+
+  /* Check that the first variant listed in the playlist is selected as the default */
+  assert_equals_int (master->default_variant != NULL, TRUE);
+  assert_equals_int (master->default_variant->bandwidth, 128000);
+
+  gst_hls_master_playlist_unref (master);
+}
+
+GST_START_TEST (test_load_main_playlist_variant)
+{
+  do_test_load_main_playlist_variant (VARIANT_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_load_main_playlist_variant_with_missing_uri)
+{
+  GstHLSMasterPlaylist *master;
+  master = load_master_playlist (VARIANT_PLAYLIST_WITH_URI_MISSING);
+
+  assert_equals_int (g_list_length (master->variants), 3);
+  gst_hls_master_playlist_unref (master);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_load_windows_line_endings_variant_playlist)
+{
+  do_test_load_main_playlist_variant (WINDOWS_LINE_ENDINGS_VARIANT_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_load_main_playlist_with_empty_lines)
+{
+  do_test_load_main_playlist_variant (EMPTY_LINES_VARIANT_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_load_windows_main_playlist_with_empty_lines)
+{
+  do_test_load_main_playlist_variant (WINDOWS_EMPTY_LINES_VARIANT_PLAYLIST);
+}
+
+GST_END_TEST;
+
+static void
+check_on_demand_playlist (const gchar * data)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file;
+
+  pl = load_m3u8 (data);
+
+  /* Sequence should be 0 as it's an ondemand playlist */
+  assert_equals_int (pl->media_sequence, 0);
+  /* Check that we are not live */
+  assert_equals_int (gst_hls_media_playlist_is_live (pl), FALSE);
+  /* Check number of entries */
+  assert_equals_int (pl->segments->len, 4);
+  /* Check the first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_string (file->uri, "http://media.example.com/001.ts");
+  assert_equals_int (file->sequence, 0);
+  /* Check the last media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 3));
+  assert_equals_string (file->uri, "http://media.example.com/004.ts");
+  assert_equals_int (file->sequence, 3);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_START_TEST (test_on_demand_playlist)
+{
+  check_on_demand_playlist (ON_DEMAND_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_windows_line_endings_playlist)
+{
+  check_on_demand_playlist (WINDOWS_LINE_ENDINGS_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_empty_lines_playlist)
+{
+  check_on_demand_playlist (EMPTY_LINES_PLAYLIST);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_windows_empty_lines_playlist)
+{
+  check_on_demand_playlist (WINDOWS_EMPTY_LINES_PLAYLIST);
+}
+
+GST_END_TEST;
+
+/* This test covers live streams where playback is paused for longer than the
+ * DVR window and then resumed. The playlist has rotated completely, so there
+ * is a jump in the media sequence that must be handled correctly. */
+GST_START_TEST (test_live_playlist_rotated)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file, *file2;
+
+  pl = load_m3u8 (LIVE_PLAYLIST);
+
+  /* Check first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_int (file->sequence, 2680);
+  gst_m3u8_media_segment_ref (file);
+  gst_hls_media_playlist_unref (pl);
+
+  pl = load_m3u8 (LIVE_ROTATED_PLAYLIST);
+  file2 = gst_hls_media_playlist_sync_to_segment (pl, file);
+  fail_unless (file2 != NULL);
+  gst_m3u8_media_segment_unref (file);
+  gst_m3u8_media_segment_unref (file2);
+
+  /* FIXME: Sequence should be last - 3. Should it? */
+  /* Check first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_int (file->sequence, 3001);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_playlist_with_doubles_duration)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file;
+  gint64 start = -1;
+  gint64 stop = -1;
+
+  pl = load_m3u8 (DOUBLES_PLAYLIST);
+
+  /* Check the durations of all media segments */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_float (file->duration / (double) GST_SECOND, 10.321);
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 1));
+  assert_equals_float (file->duration / (double) GST_SECOND, 9.6789);
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 2));
+  assert_equals_float (file->duration / (double) GST_SECOND, 10.2344);
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 3));
+  assert_equals_float (file->duration / (double) GST_SECOND, 9.92);
+  fail_unless (gst_hls_media_playlist_get_seek_range (pl, &start, &stop));
+  assert_equals_int64 (start, 0);
+  assert_equals_float (stop / (double) GST_SECOND,
+      10.321 + 9.6789 + 10.2344 + 9.92);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_playlist_with_encryption)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file;
+  guint8 iv1[16] = { 0, };
+  guint8 iv2[16] = { 0, };
+
+  iv1[15] = 1;
+  iv2[15] = 2;
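+  /* iv1 is the explicit IV from the last EXT-X-KEY tag; iv2 matches the media
+   * sequence number (2) of the first encrypted segment, since per the HLS spec
+   * a missing IV attribute defaults to the segment's sequence number. */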
+
+  pl = load_m3u8 (AES_128_ENCRYPTED_PLAYLIST);
+
+  assert_equals_int (pl->segments->len, 5);
+
+  /* Check all media segments */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  fail_unless (file->key == NULL);
+
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 1));
+  fail_unless (file->key == NULL);
+
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 2));
+  fail_unless (file->key != NULL);
+  assert_equals_string (file->key, "https://priv.example.com/key.bin");
+  fail_unless (memcmp (&file->iv, iv2, 16) == 0);
+
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 3));
+  fail_unless (file->key != NULL);
+  assert_equals_string (file->key, "https://priv.example.com/key2.bin");
+  fail_unless (memcmp (&file->iv, iv1, 16) == 0);
+
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 4));
+  fail_unless (file->key != NULL);
+  assert_equals_string (file->key, "https://priv.example.com/key2.bin");
+  fail_unless (memcmp (&file->iv, iv1, 16) == 0);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_parse_invalid_playlist)
+{
+  GstHLSMediaPlaylist *pl;
+
+  pl = gst_hls_media_playlist_parse (g_strdup ("#INVALID"),
+      "http://localhost/test.m3u8", NULL);
+  fail_if (pl != NULL);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_sync_playlist_to_segment)
+{
+  GstHLSMediaPlaylist *pl;
+  gchar *live_pl;
+  GstM3U8MediaSegment *file, *file2;
+
+  /* Test updates in live playlists */
+  pl = load_m3u8 (LIVE_PLAYLIST);
+  assert_equals_int (pl->segments->len, 4);
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  gst_m3u8_media_segment_ref (file);
+  gst_hls_media_playlist_unref (pl);
+
+  /* Add a new entry to the playlist and check the update */
+  live_pl = g_strdup_printf ("%s\n%s\n%s", LIVE_PLAYLIST, "#EXTINF:8",
+      "https://priv.example.com/fileSequence2684.ts");
+  pl = load_m3u8 (live_pl);
+  fail_unless (pl != NULL);
+  g_free (live_pl);
+  file2 = gst_hls_media_playlist_sync_to_segment (pl, file);
+  fail_unless (file2 != NULL);
+  gst_m3u8_media_segment_unref (file);
+  assert_equals_int (pl->segments->len, 5);
+  gst_hls_media_playlist_unref (pl);
+
+  /* Test sliding window */
+  pl = load_m3u8 (LIVE_PLAYLIST);
+  file = gst_hls_media_playlist_sync_to_segment (pl, file2);
+  fail_unless (file != NULL);
+  gst_m3u8_media_segment_unref (file);
+  gst_m3u8_media_segment_unref (file2);
+  assert_equals_int (pl->segments->len, 4);
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_playlist_media_files)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file;
+
+  pl = load_m3u8 (ON_DEMAND_PLAYLIST);
+
+  /* Check number of entries */
+  assert_equals_int (pl->segments->len, 4);
+  /* Check the first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_string (file->uri, "http://media.example.com/001.ts");
+  assert_equals_int (file->sequence, 0);
+  assert_equals_float (file->duration, 10 * (double) GST_SECOND);
+  assert_equals_int (file->offset, 0);
+  assert_equals_int (file->size, -1);
+  assert_equals_string (file->title, "Test");
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_playlist_byte_range_media_files)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *file;
+
+  pl = load_m3u8 (BYTE_RANGES_PLAYLIST);
+
+  /* Check number of entries */
+  assert_equals_int (pl->segments->len, 4);
+  /* Check the first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_string (file->uri, "http://media.example.com/all.ts");
+  assert_equals_int (file->sequence, 0);
+  assert_equals_float (file->duration, 10 * (double) GST_SECOND);
+  assert_equals_int (file->offset, 100);
+  assert_equals_int (file->size, 1000);
+  /* Check the last media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 3));
+  assert_equals_string (file->uri, "http://media.example.com/all.ts");
+  assert_equals_int (file->sequence, 3);
+  assert_equals_float (file->duration, 10 * (double) GST_SECOND);
+  assert_equals_int (file->offset, 3000);
+  assert_equals_int (file->size, 1000);
+
+  gst_hls_media_playlist_unref (pl);
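+
+  /* The ACC_OFFSET fixture presumably lists byte ranges without explicit
+   * offsets, so each segment's offset is expected to accumulate from 0
+   * (0, 1000, 2000, 3000) */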
+  pl = load_m3u8 (BYTE_RANGES_ACC_OFFSET_PLAYLIST);
+
+  /* Check number of entries */
+  assert_equals_int (pl->segments->len, 4);
+  /* Check the first media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 0));
+  assert_equals_string (file->uri, "http://media.example.com/all.ts");
+  assert_equals_int (file->sequence, 0);
+  assert_equals_float (file->duration, 10 * (double) GST_SECOND);
+  assert_equals_int (file->offset, 0);
+  assert_equals_int (file->size, 1000);
+  /* Check the last media segment */
+  file = GST_M3U8_MEDIA_SEGMENT (g_ptr_array_index (pl->segments, 3));
+  assert_equals_string (file->uri, "http://media.example.com/all.ts");
+  assert_equals_int (file->sequence, 3);
+  assert_equals_float (file->duration, 10 * (double) GST_SECOND);
+  assert_equals_int (file->offset, 3000);
+  assert_equals_int (file->size, 1000);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_advance_fragment)
+{
+  GstHLSMediaPlaylist *pl;
+  GstM3U8MediaSegment *mf, *prev;
+
+  pl = load_m3u8 (BYTE_RANGES_PLAYLIST);
+
+  /* Check the starting segment */
+  mf = gst_hls_media_playlist_get_starting_segment (pl);
+  fail_unless (mf != NULL);
+  assert_equals_int (mf->discont, FALSE);
+  assert_equals_string (mf->uri, "http://media.example.com/all.ts");
+  assert_equals_uint64 (mf->stream_time, 0);
+  assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
+  assert_equals_uint64 (mf->offset, 100);
+  assert_equals_uint64 (mf->offset + mf->size, 1100);
+
+  /* Check the next media segment; keep a reference to the previous segment
+   * until we have advanced from it */
+  prev = mf;
+  mf = gst_hls_media_playlist_advance_fragment (pl, prev, TRUE);
+  gst_m3u8_media_segment_unref (prev);
+  fail_unless (mf != NULL);
+  assert_equals_int (mf->discont, FALSE);
+  assert_equals_string (mf->uri, "http://media.example.com/all.ts");
+  assert_equals_uint64 (mf->stream_time, 10 * GST_SECOND);
+  assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
+  assert_equals_uint64 (mf->offset, 1000);
+  assert_equals_uint64 (mf->offset + mf->size, 2000);
+
+  /* Check the next media segment */
+  prev = mf;
+  mf = gst_hls_media_playlist_advance_fragment (pl, prev, TRUE);
+  gst_m3u8_media_segment_unref (prev);
+  fail_unless (mf != NULL);
+  assert_equals_int (mf->discont, FALSE);
+  assert_equals_string (mf->uri, "http://media.example.com/all.ts");
+  assert_equals_uint64 (mf->stream_time, 20 * GST_SECOND);
+  assert_equals_uint64 (mf->duration, 10 * GST_SECOND);
+  assert_equals_uint64 (mf->offset, 2000);
+  assert_equals_uint64 (mf->offset + mf->size, 3000);
+  gst_m3u8_media_segment_unref (mf);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_get_duration)
+{
+  GstHLSMediaPlaylist *pl;
+
+  /* Test duration for on-demand playlists */
+  pl = load_m3u8 (ON_DEMAND_PLAYLIST);
+  assert_equals_uint64 (gst_hls_media_playlist_get_duration (pl),
+      40 * GST_SECOND);
+  gst_hls_media_playlist_unref (pl);
+
+  /* Test duration for live playlists */
+  pl = load_m3u8 (LIVE_PLAYLIST);
+  assert_equals_uint64 (gst_hls_media_playlist_get_duration (pl),
+      GST_CLOCK_TIME_NONE);
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_get_target_duration)
+{
+  GstHLSMediaPlaylist *pl;
+
+  pl = load_m3u8 (ON_DEMAND_PLAYLIST);
+  assert_equals_uint64 (pl->targetduration, 10 * GST_SECOND);
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_get_stream_for_bitrate)
+{
+  GstHLSMasterPlaylist *master;
+  GstHLSVariantStream *stream;
+
+  master = load_master_playlist (VARIANT_PLAYLIST);
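+
+  /* A requested bitrate of 0 is expected to select the lowest-bandwidth
+   * variant of the fixture (65000 bps) and G_MAXINT32 the highest
+   * (768000 bps); the remaining checks presumably pick the highest variant
+   * whose bandwidth does not exceed the requested bitrate */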
+  stream = gst_hls_master_playlist_get_variant_for_bitrate (master, NULL, 0, 0);
+
+  assert_equals_int (stream->bandwidth, 65000);
+
+  stream =
+      gst_hls_master_playlist_get_variant_for_bitrate (master, NULL,
+      G_MAXINT32, 0);
+  assert_equals_int (stream->bandwidth, 768000);
+  stream =
+      gst_hls_master_playlist_get_variant_for_bitrate (master, NULL, 300000, 0);
+  assert_equals_int (stream->bandwidth, 256000);
+  stream =
+      gst_hls_master_playlist_get_variant_for_bitrate (master, NULL, 500000, 0);
+  assert_equals_int (stream->bandwidth, 256000);
+  stream =
+      gst_hls_master_playlist_get_variant_for_bitrate (master, NULL, 255000, 0);
+  assert_equals_int (stream->bandwidth, 128000);
+
+  gst_hls_master_playlist_unref (master);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_url_with_slash_query_param)
+{
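+  /* The variant URI below carries a '/' inside its query string; resolving
+   * the relative URI against the base must not treat that '/' as a path
+   * separator */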
+  static const gchar *MASTER_PLAYLIST = "#EXTM3U \n"
+      "#EXT-X-VERSION:4\n"
+      "#EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=1251135, CODECS=\"avc1.42001f, mp4a.40.2\", RESOLUTION=640x352\n"
+      "1251/media.m3u8?acl=/*1054559_h264_1500k.mp4\n";
+  GstHLSMasterPlaylist *master;
+  GstHLSVariantStream *stream;
+
+  master = load_master_playlist (MASTER_PLAYLIST);
+
+  assert_equals_int (g_list_length (master->variants), 1);
+  stream = g_list_nth_data (master->variants, 0);
+  assert_equals_string (stream->uri,
+      "http://localhost/1251/media.m3u8?acl=/*1054559_h264_1500k.mp4");
+
+  gst_hls_master_playlist_unref (master);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_stream_inf_tag)
+{
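+  /* Check that the EXT-X-STREAM-INF attributes (PROGRAM-ID, BANDWIDTH,
+   * CODECS, RESOLUTION) are parsed into the variant stream */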
+  static const gchar *MASTER_PLAYLIST = "#EXTM3U \n"
+      "#EXT-X-VERSION:4\n"
+      "#EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=1251135, CODECS=\"avc1.42001f, mp4a.40.2\", RESOLUTION=640x352\n"
+      "media.m3u8\n";
+  GstHLSMasterPlaylist *master;
+  GstHLSVariantStream *stream;
+
+  master = load_master_playlist (MASTER_PLAYLIST);
+
+  assert_equals_int (g_list_length (master->variants), 1);
+  stream = g_list_nth_data (master->variants, 0);
+
+  assert_equals_int64 (stream->program_id, 1);
+  assert_equals_int64 (stream->width, 640);
+  assert_equals_int64 (stream->height, 352);
+  assert_equals_int64 (stream->bandwidth, 1251135);
+  assert_equals_string (stream->codecs, "avc1.42001f, mp4a.40.2");
+  gst_hls_master_playlist_unref (master);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_map_tag)
+{
+  GstHLSMediaPlaylist *pl;
+  GPtrArray *segments;
+  GstM3U8MediaSegment *seg1, *seg2, *seg3;
+  GstM3U8InitFile *init1, *init2;
+
+  /* Test EXT-X-MAP tag
+   * This M3U8 has two EXT-X-MAP tags.
+   * The first one applies to the 1st and 2nd segments, and the other
+   * applies only to the 3rd segment.
+   */
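+  /* Illustrative shape only (the actual MAP_TAG_PLAYLIST fixture is defined
+   * above); a playlist matching the assertions below would look roughly like:
+   *   #EXT-X-MAP:URI="init1.mp4",BYTERANGE="50@50"
+   *   #EXTINF:...      (segment 1)
+   *   #EXTINF:...      (segment 2)
+   *   #EXT-X-MAP:URI="init2.mp4"
+   *   #EXTINF:...      (segment 3)
+   * with the URIs and durations here being placeholders */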
+
+  pl = load_m3u8 (MAP_TAG_PLAYLIST);
+  segments = pl->segments;
+
+  assert_equals_int (segments->len, 3);
+
+  for (gsize i = 0; i < segments->len; i++) {
+    GstM3U8MediaSegment *file = g_ptr_array_index (segments, i);
+
+    GstM3U8InitFile *init_file = file->init_file;
+    fail_unless (init_file != NULL);
+    fail_unless (init_file->uri != NULL);
+  }
+
+  seg1 = g_ptr_array_index (segments, 0);
+  seg2 = g_ptr_array_index (segments, 1);
+  seg3 = g_ptr_array_index (segments, 2);
+
+  /* Segment 1 and 2 share the identical init segment */
+  fail_unless (seg1->init_file == seg2->init_file);
+  assert_equals_int (seg1->init_file->ref_count, 2);
+
+  fail_unless (seg2->init_file != seg3->init_file);
+  assert_equals_int (seg3->init_file->ref_count, 1);
+
+  init1 = seg1->init_file;
+  init2 = seg3->init_file;
+
+  fail_unless (g_strcmp0 (init1->uri, init2->uri) != 0);
+  assert_equals_int (init1->offset, 50);
+  assert_equals_int (init1->size, 50);
+
+  assert_equals_int (init2->offset, 0);
+  assert_equals_int (init2->size, -1);
+
+  gst_hls_media_playlist_unref (pl);
+}
+
+GST_END_TEST;
+
+static Suite *
+hlsdemux_suite (void)
+{
+  Suite *s = suite_create ("hlsdemux_m3u8");
+  TCase *tc_m3u8 = tcase_create ("m3u8client");
+
+  GST_DEBUG_CATEGORY_INIT (hls_debug, "hlsdemux_m3u", 0, "hlsdemux m3u test");
+
+  suite_add_tcase (s, tc_m3u8);
+  tcase_add_test (tc_m3u8, test_load_main_playlist_invalid);
+  tcase_add_test (tc_m3u8, test_load_main_playlist_rendition);
+  tcase_add_test (tc_m3u8, test_load_main_playlist_variant);
+  tcase_add_test (tc_m3u8, test_load_main_playlist_variant_with_missing_uri);
+  tcase_add_test (tc_m3u8, test_load_windows_line_endings_variant_playlist);
+  tcase_add_test (tc_m3u8, test_load_main_playlist_with_empty_lines);
+  tcase_add_test (tc_m3u8, test_load_windows_main_playlist_with_empty_lines);
+  tcase_add_test (tc_m3u8, test_on_demand_playlist);
+  tcase_add_test (tc_m3u8, test_windows_line_endings_playlist);
+  tcase_add_test (tc_m3u8, test_windows_empty_lines_playlist);
+  tcase_add_test (tc_m3u8, test_empty_lines_playlist);
+  tcase_add_test (tc_m3u8, test_live_playlist_rotated);
+  tcase_add_test (tc_m3u8, test_playlist_with_doubles_duration);
+  tcase_add_test (tc_m3u8, test_playlist_with_encryption);
+  tcase_add_test (tc_m3u8, test_parse_invalid_playlist);
+  tcase_add_test (tc_m3u8, test_sync_playlist_to_segment);
+  tcase_add_test (tc_m3u8, test_playlist_media_files);
+  tcase_add_test (tc_m3u8, test_playlist_byte_range_media_files);
+  tcase_add_test (tc_m3u8, test_advance_fragment);
+  tcase_add_test (tc_m3u8, test_get_duration);
+  tcase_add_test (tc_m3u8, test_get_target_duration);
+  tcase_add_test (tc_m3u8, test_get_stream_for_bitrate);
+  tcase_add_test (tc_m3u8, test_url_with_slash_query_param);
+  tcase_add_test (tc_m3u8, test_stream_inf_tag);
+  tcase_add_test (tc_m3u8, test_map_tag);
+  return s;
+}
+
+GST_CHECK_MAIN (hlsdemux);
index 5625d17..1c9f393 100644 (file)
@@ -41,6 +41,7 @@ good_tests = [
   [ 'elements/dtmf' ],
   [ 'elements/flvdemux' ],
   [ 'elements/flvmux' ],
+  [ 'elements/hlsdemux_m3u8', not hls_dep.found() or not adaptivedemux2_dep.found(), [hls_dep, adaptivedemux2_dep] ],
   [ 'elements/mulawdec' ],
   [ 'elements/mulawenc' ],
   [ 'elements/icydemux' ],
@@ -129,6 +130,7 @@ libsoup3_dep = dependency('libsoup-3.0', required : false,
 # FIXME: unistd dependency or not tested yet on windows
 if host_machine.system() != 'windows'
   good_tests += [
+    [ 'elements/dash_mpd', not adaptivedemux2_dep.found(), [adaptivedemux2_dep] ],
     [ 'pipelines/flacdec', not flac_dep.found() ],
     [ 'elements/gdkpixbufsink', not gdkpixbuf_dep.found(), [gdkpixbuf_dep] ],
     [ 'elements/gdkpixbufoverlay', not gdkpixbuf_dep.found() ],